From 0cc38a20e8a93cedd960469722c06cf7066cdd1d Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Wed, 14 Aug 2024 09:42:39 -0700
Subject: [PATCH 001/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#12997)

- [ ] Regenerate this pull request now.

PiperOrigin-RevId: 661268868
Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177
Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWxvZ2dpbmcvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ==
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LW1pZ3JhdGlvbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LXJlc2VydmF0aW9uLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpbGxpbmctYnVkZ2V0cy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpbGxpbmcvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ==
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpbmFyeS1hdXRob3JpemF0aW9uLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJ1aWxkLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNlcnRpZmljYXRlLW1hbmFnZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ==
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNoYW5uZWwvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ==
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNsb3VkY29udHJvbHNwYXJ0bmVyLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNsb3VkcXVvdGFzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbW1lcmNlLWNvbnN1bWVyLXByb2N1cmVtZW50Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbW1vbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbXB1dGUvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ==
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbmZpZGVudGlhbGNvbXB1dGluZy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbmZpZy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRhY3QtY2VudGVyLWluc2lnaHRzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRhaW5lci8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRhaW5lcmFuYWx5c2lzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0=
Copy-Tag:
eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRlbnR3YXJlaG91c2UvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== --------- Co-authored-by: Owl Bot --- .../cloud/bigquery_logging/gapic_version.py | 2 +- .../bigquery_logging_v1/gapic_version.py | 2 +- .../cloud/bigquery_migration/gapic_version.py | 2 +- .../bigquery_migration_v2/gapic_version.py | 2 +- .../migration_service/async_client.py | 5 +- .../services/migration_service/client.py | 2 +- .../gapic_version.py | 2 +- .../migration_service/async_client.py | 5 +- .../services/migration_service/client.py | 2 +- .../sql_translation_service/async_client.py | 6 +- .../sql_translation_service/client.py | 2 +- ...ta_google.cloud.bigquery.migration.v2.json | 2 +- ...ogle.cloud.bigquery.migration.v2alpha.json | 2 +- .../test_migration_service.py | 63 +- .../test_migration_service.py | 63 +- .../test_sql_translation_service.py | 9 +- .../bigquery_reservation/gapic_version.py | 2 +- .../bigquery_reservation_v1/gapic_version.py | 2 +- .../reservation_service/async_client.py | 6 +- .../services/reservation_service/client.py | 2 +- ..._google.cloud.bigquery.reservation.v1.json | 2 +- .../test_reservation_service.py | 189 +++--- .../cloud/billing/budgets/gapic_version.py | 2 +- .../cloud/billing/budgets_v1/gapic_version.py | 2 +- .../services/budget_service/async_client.py | 5 +- .../services/budget_service/client.py | 2 +- .../billing/budgets_v1beta1/gapic_version.py | 2 +- .../services/budget_service/async_client.py | 5 +- .../services/budget_service/client.py | 2 +- ...adata_google.cloud.billing.budgets.v1.json | 2 +- ..._google.cloud.billing.budgets.v1beta1.json | 2 +- .../gapic/budgets_v1/test_budget_service.py | 45 +- .../budgets_v1beta1/test_budget_service.py | 45 +- .../google/cloud/billing/gapic_version.py | 2 +- .../google/cloud/billing_v1/gapic_version.py | 2 +- .../services/cloud_billing/async_client.py | 5 +- .../services/cloud_billing/client.py | 2 +- .../services/cloud_catalog/async_client.py | 5 +- .../services/cloud_catalog/client.py | 2 +- ...ppet_metadata_google.cloud.billing.v1.json | 2 +- .../gapic/billing_v1/test_cloud_billing.py | 99 ++-- .../gapic/billing_v1/test_cloud_catalog.py | 18 +- .../binaryauthorization/gapic_version.py | 2 +- .../binaryauthorization_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../binauthz_management_service_v1/client.py | 2 +- .../services/system_policy_v1/async_client.py | 5 +- .../services/system_policy_v1/client.py | 2 +- .../validation_helper_v1/async_client.py | 6 +- .../services/validation_helper_v1/client.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../system_policy_v1_beta1/async_client.py | 6 +- .../services/system_policy_v1_beta1/client.py | 2 +- ...a_google.cloud.binaryauthorization.v1.json | 2 +- ...gle.cloud.binaryauthorization.v1beta1.json | 2 +- .../test_binauthz_management_service_v1.py | 63 +- .../test_system_policy_v1.py | 9 +- .../test_validation_helper_v1.py | 9 +- ...st_binauthz_management_service_v1_beta1.py | 63 +- .../test_system_policy_v1_beta1.py | 9 +- .../devtools/cloudbuild/gapic_version.py | 2 +- .../devtools/cloudbuild_v1/gapic_version.py | 2 +- .../services/cloud_build/async_client.py | 5 +- .../services/cloud_build/client.py | 2 +- .../devtools/cloudbuild_v2/gapic_version.py | 2 +- .../repository_manager/async_client.py | 5 +- .../services/repository_manager/client.py | 2 +- ...etadata_google.devtools.cloudbuild.v1.json | 2 +- ...etadata_google.devtools.cloudbuild.v2.json | 2 +- 
.../gapic/cloudbuild_v1/test_cloud_build.py | 232 ++++---- .../cloudbuild_v2/test_repository_manager.py | 186 +++--- .../certificate_manager/gapic_version.py | 2 +- .../certificate_manager_v1/gapic_version.py | 2 +- .../certificate_manager/async_client.py | 6 +- .../services/certificate_manager/client.py | 2 +- ...ta_google.cloud.certificatemanager.v1.json | 2 +- .../test_certificate_manager.py | 431 ++++++++------ .../google/cloud/channel/gapic_version.py | 2 +- .../google/cloud/channel_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../cloud_channel_reports_service/client.py | 2 +- .../cloud_channel_service/async_client.py | 6 +- .../services/cloud_channel_service/client.py | 2 +- ...ppet_metadata_google.cloud.channel.v1.json | 2 +- .../test_cloud_channel_reports_service.py | 37 +- .../channel_v1/test_cloud_channel_service.py | 551 ++++++++++-------- .../cloudcontrolspartner/gapic_version.py | 2 +- .../cloudcontrolspartner_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../cloud_controls_partner_core/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../cloud_controls_partner_core/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- ..._google.cloud.cloudcontrolspartner.v1.json | 2 +- ...gle.cloud.cloudcontrolspartner.v1beta.json | 2 +- .../test_cloud_controls_partner_core.py | 72 ++- .../test_cloud_controls_partner_monitoring.py | 18 +- .../test_cloud_controls_partner_core.py | 72 ++- .../test_cloud_controls_partner_monitoring.py | 18 +- .../google/cloud/cloudquotas/gapic_version.py | 2 +- .../cloud/cloudquotas_v1/gapic_version.py | 2 +- .../services/cloud_quotas/async_client.py | 5 +- .../services/cloud_quotas/client.py | 2 +- ...et_metadata_google.api.cloudquotas.v1.json | 2 +- .../gapic/cloudquotas_v1/test_cloud_quotas.py | 54 +- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../consumer_procurement_service/client.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../consumer_procurement_service/client.py | 2 +- ...loud.commerce.consumer.procurement.v1.json | 2 +- ...ommerce.consumer.procurement.v1alpha1.json | 2 +- .../test_consumer_procurement_service.py | 37 +- .../test_consumer_procurement_service.py | 37 +- .../google/cloud/common/gapic_version.py | 2 +- .../google/cloud/compute/gapic_version.py | 2 +- .../google/cloud/compute_v1/gapic_version.py | 2 +- .../services/accelerator_types/client.py | 2 +- .../compute_v1/services/addresses/client.py | 2 +- .../compute_v1/services/autoscalers/client.py | 2 +- .../services/backend_buckets/client.py | 2 +- .../services/backend_services/client.py | 2 +- .../compute_v1/services/disk_types/client.py | 2 +- .../cloud/compute_v1/services/disks/client.py | 2 +- .../services/external_vpn_gateways/client.py | 2 +- .../services/firewall_policies/client.py | 2 +- .../compute_v1/services/firewalls/client.py | 2 +- .../services/forwarding_rules/client.py | 2 +- .../services/global_addresses/client.py | 2 +- .../global_forwarding_rules/client.py | 2 +- .../global_network_endpoint_groups/client.py | 2 +- .../services/global_operations/client.py | 2 +- .../global_organization_operations/client.py | 2 +- .../client.py | 2 +- .../services/health_checks/client.py | 2 +- .../services/image_family_views/client.py | 2 +- .../compute_v1/services/images/client.py | 2 +- .../client.py | 2 +- .../instance_group_managers/client.py | 2 +- .../services/instance_groups/client.py | 2 +- 
.../instance_settings_service/client.py | 2 +- .../services/instance_templates/client.py | 2 +- .../compute_v1/services/instances/client.py | 2 +- .../services/instant_snapshots/client.py | 2 +- .../interconnect_attachments/client.py | 2 +- .../services/interconnect_locations/client.py | 2 +- .../interconnect_remote_locations/client.py | 2 +- .../services/interconnects/client.py | 2 +- .../services/license_codes/client.py | 2 +- .../compute_v1/services/licenses/client.py | 2 +- .../services/machine_images/client.py | 2 +- .../services/machine_types/client.py | 2 +- .../services/network_attachments/client.py | 2 +- .../network_edge_security_services/client.py | 2 +- .../network_endpoint_groups/client.py | 2 +- .../network_firewall_policies/client.py | 2 +- .../compute_v1/services/networks/client.py | 2 +- .../compute_v1/services/node_groups/client.py | 2 +- .../services/node_templates/client.py | 2 +- .../compute_v1/services/node_types/client.py | 2 +- .../services/packet_mirrorings/client.py | 2 +- .../compute_v1/services/projects/client.py | 2 +- .../public_advertised_prefixes/client.py | 2 +- .../public_delegated_prefixes/client.py | 2 +- .../services/region_autoscalers/client.py | 2 +- .../region_backend_services/client.py | 2 +- .../services/region_commitments/client.py | 2 +- .../services/region_disk_types/client.py | 2 +- .../services/region_disks/client.py | 2 +- .../region_health_check_services/client.py | 2 +- .../services/region_health_checks/client.py | 2 +- .../region_instance_group_managers/client.py | 2 +- .../services/region_instance_groups/client.py | 2 +- .../region_instance_templates/client.py | 2 +- .../services/region_instances/client.py | 2 +- .../region_instant_snapshots/client.py | 2 +- .../region_network_endpoint_groups/client.py | 2 +- .../client.py | 2 +- .../region_notification_endpoints/client.py | 2 +- .../services/region_operations/client.py | 2 +- .../region_security_policies/client.py | 2 +- .../region_ssl_certificates/client.py | 2 +- .../services/region_ssl_policies/client.py | 2 +- .../region_target_http_proxies/client.py | 2 +- .../region_target_https_proxies/client.py | 2 +- .../region_target_tcp_proxies/client.py | 2 +- .../services/region_url_maps/client.py | 2 +- .../services/region_zones/client.py | 2 +- .../compute_v1/services/regions/client.py | 2 +- .../services/reservations/client.py | 2 +- .../services/resource_policies/client.py | 2 +- .../compute_v1/services/routers/client.py | 2 +- .../compute_v1/services/routes/client.py | 2 +- .../services/security_policies/client.py | 2 +- .../services/service_attachments/client.py | 2 +- .../snapshot_settings_service/client.py | 2 +- .../compute_v1/services/snapshots/client.py | 2 +- .../services/ssl_certificates/client.py | 2 +- .../services/ssl_policies/client.py | 2 +- .../services/storage_pool_types/client.py | 2 +- .../services/storage_pools/client.py | 2 +- .../compute_v1/services/subnetworks/client.py | 2 +- .../services/target_grpc_proxies/client.py | 2 +- .../services/target_http_proxies/client.py | 2 +- .../services/target_https_proxies/client.py | 2 +- .../services/target_instances/client.py | 2 +- .../services/target_pools/client.py | 2 +- .../services/target_ssl_proxies/client.py | 2 +- .../services/target_tcp_proxies/client.py | 2 +- .../services/target_vpn_gateways/client.py | 2 +- .../compute_v1/services/url_maps/client.py | 2 +- .../services/vpn_gateways/client.py | 2 +- .../compute_v1/services/vpn_tunnels/client.py | 2 +- .../services/zone_operations/client.py | 2 +- 
.../cloud/compute_v1/services/zones/client.py | 2 +- ...ppet_metadata_google.cloud.compute.v1.json | 2 +- .../confidentialcomputing/gapic_version.py | 2 +- .../confidentialcomputing_v1/gapic_version.py | 2 +- .../confidential_computing/async_client.py | 6 +- .../services/confidential_computing/client.py | 2 +- ...google.cloud.confidentialcomputing.v1.json | 2 +- .../test_confidential_computing.py | 18 +- .../google/cloud/config/gapic_version.py | 2 +- .../google/cloud/config_v1/gapic_version.py | 2 +- .../config_v1/services/config/async_client.py | 5 +- .../cloud/config_v1/services/config/client.py | 2 +- ...ippet_metadata_google.cloud.config.v1.json | 2 +- .../tests/unit/gapic/config_v1/test_config.py | 277 +++++---- .../contact_center_insights/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../contact_center_insights/async_client.py | 6 +- .../contact_center_insights/client.py | 2 +- ...google.cloud.contactcenterinsights.v1.json | 2 +- .../test_contact_center_insights.py | 451 +++++++------- .../google/cloud/container/gapic_version.py | 2 +- .../cloud/container_v1/gapic_version.py | 2 +- .../services/cluster_manager/async_client.py | 5 +- .../services/cluster_manager/client.py | 2 +- .../cloud/container_v1beta1/gapic_version.py | 2 +- .../services/cluster_manager/async_client.py | 5 +- .../services/cluster_manager/client.py | 2 +- .../snippet_metadata_google.container.v1.json | 2 +- ...pet_metadata_google.container.v1beta1.json | 2 +- .../container_v1/test_cluster_manager.py | 306 +++++----- .../container_v1beta1/test_cluster_manager.py | 315 +++++----- .../containeranalysis/gapic_version.py | 2 +- .../containeranalysis_v1/gapic_version.py | 2 +- .../container_analysis/async_client.py | 5 +- .../services/container_analysis/client.py | 2 +- ..._google.devtools.containeranalysis.v1.json | 2 +- .../test_container_analysis.py | 36 +- .../cloud/contentwarehouse/gapic_version.py | 2 +- .../contentwarehouse_v1/gapic_version.py | 2 +- .../document_link_service/async_client.py | 6 +- .../services/document_link_service/client.py | 2 +- .../document_schema_service/async_client.py | 6 +- .../document_schema_service/client.py | 2 +- .../services/document_service/async_client.py | 5 +- .../services/document_service/client.py | 2 +- .../services/pipeline_service/async_client.py | 5 +- .../services/pipeline_service/client.py | 2 +- .../services/rule_set_service/async_client.py | 5 +- .../services/rule_set_service/client.py | 2 +- .../synonym_set_service/async_client.py | 5 +- .../services/synonym_set_service/client.py | 2 +- ...data_google.cloud.contentwarehouse.v1.json | 2 +- .../test_document_link_service.py | 36 +- .../test_document_schema_service.py | 45 +- .../test_document_service.py | 72 ++- .../test_pipeline_service.py | 19 +- .../test_rule_set_service.py | 45 +- .../test_synonym_set_service.py | 45 +- 280 files changed, 2545 insertions(+), 2165 deletions(-) diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py index 57646b556e81..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py index 57646b556e81..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py index 53e74a3d1eb8..bd489ab5e971 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient) - ) + get_transport_class = MigrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py index 960e0603769c..87b6df7fd5f2 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py @@ -693,7 +693,7 @@ def __init__( Type[MigrationServiceTransport], Callable[..., MigrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + MigrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MigrationServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py index 57646b556e81..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py index 1a664bea858b..78fbfa6e9f1f 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient) - ) + get_transport_class = MigrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index 724464d77f11..32164e645021 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -693,7 +693,7 @@ def __init__( Type[MigrationServiceTransport], Callable[..., MigrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + MigrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MigrationServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py index 956f35ad59f2..c9f47f4dec54 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,10 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SqlTranslationServiceClient).get_transport_class, - type(SqlTranslationServiceClient), - ) + get_transport_class = SqlTranslationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py index 8fc2dce51fe9..ff48b7f2af4b 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py @@ -644,7 +644,7 @@ def __init__( Type[SqlTranslationServiceTransport], Callable[..., SqlTranslationServiceTransport], ] = ( - type(self).get_transport_class(transport) + SqlTranslationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SqlTranslationServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json index e79de74a1e1e..eb49516af3a7 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json index 040fcb245230..14575559ea90 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py index 82dab12d415f..e77bc503ba08 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py @@ -1306,22 +1306,23 @@ async def test_create_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ 
client._client._transport.create_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.create_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1714,22 +1715,23 @@ async def test_get_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2108,22 +2110,23 @@ async def test_list_migration_workflows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_workflows - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_workflows(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_workflows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2695,22 +2698,23 @@ async def test_delete_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.delete_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3067,22 +3071,23 @@ async def test_start_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.start_migration_workflow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3458,22 +3463,23 @@ async def test_get_migration_subtask_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_subtask - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_subtask(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_subtask(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3858,22 +3864,23 @@ async def test_list_migration_subtasks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_subtasks - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_subtasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_subtasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index 4f501bf67df1..a990375a119a 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -1303,22 +1303,23 @@ async def test_create_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.create_migration_workflow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1711,22 +1712,23 @@ async def test_get_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2105,22 +2107,23 @@ async def test_list_migration_workflows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_workflows - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_workflows(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_workflows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2692,22 +2695,23 @@ async def test_delete_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.delete_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3064,22 +3068,23 @@ async def test_start_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.start_migration_workflow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3455,22 +3460,23 @@ async def test_get_migration_subtask_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_subtask - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_subtask(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_subtask(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3855,22 +3861,23 @@ async def test_list_migration_subtasks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_subtasks - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_subtasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_subtasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py index a2d9f7d9467b..47459b395757 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py @@ -1305,22 +1305,23 @@ async def test_translate_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.translate_query - ] = mock_object + ] = mock_rpc request = {} await client.translate_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.translate_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py index be30eabd4aee..5975332939a1 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -223,10 +222,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ReservationServiceClient).get_transport_class, - type(ReservationServiceClient), - ) + get_transport_class = ReservationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py index 53ffea8a0a86..30e6b9de73dd 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py @@ -754,7 +754,7 @@ def __init__( Type[ReservationServiceTransport], Callable[..., ReservationServiceTransport], ] = ( - type(self).get_transport_class(transport) + ReservationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReservationServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json index 71d0f72acf58..9fa664c51cab 100644 --- a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-reservation", - "version": "1.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py index 468d0a25becf..0461f2e9e671 100644 --- a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -1360,22 +1360,23 @@ async def test_create_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_reservation - ] = mock_object + ] = mock_rpc request = {} await client.create_reservation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1777,22 +1778,23 @@ async def test_list_reservations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reservations - ] = mock_object + ] = mock_rpc request = {} await client.list_reservations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reservations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2368,22 +2370,23 @@ async def test_get_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_reservation - ] = mock_object + ] = mock_rpc request = {} await client.get_reservation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2747,22 +2750,23 @@ async def test_delete_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_reservation - ] = mock_object + ] = mock_rpc request = {} await client.delete_reservation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3135,22 +3139,23 @@ async def test_update_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_reservation - ] = mock_object + ] = mock_rpc request = {} await client.update_reservation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3563,22 +3568,23 @@ async def test_create_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.create_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3975,22 +3981,23 @@ async def test_list_capacity_commitments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_capacity_commitments - ] = mock_object + ] = mock_rpc request = {} await client.list_capacity_commitments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_capacity_commitments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4585,22 +4592,23 @@ async def test_get_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.get_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4978,22 +4986,23 @@ async def test_delete_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.delete_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5371,22 +5380,23 @@ async def test_update_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.update_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5776,22 +5786,23 @@ async def test_split_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.split_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.split_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.split_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6189,22 +6200,23 @@ async def test_merge_capacity_commitments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.merge_capacity_commitments - ] = mock_object + ] = mock_rpc request = {} await client.merge_capacity_commitments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.merge_capacity_commitments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6607,22 +6619,23 @@ async def test_create_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_assignment - ] = mock_object + ] = mock_rpc request = {} await client.create_assignment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7001,22 +7014,23 @@ async def test_list_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assignments - ] = mock_object + ] = mock_rpc request = {} await client.list_assignments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7562,22 +7576,23 @@ async def test_delete_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_assignment - ] = mock_object + ] = mock_rpc request = {} await client.delete_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7943,22 +7958,23 @@ async def test_search_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_assignments - ] = mock_object + ] = mock_rpc request = {} await client.search_assignments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8546,22 +8562,23 @@ async def test_search_all_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_assignments - ] = mock_object + ] = mock_rpc request = {} await client.search_all_assignments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9146,22 +9163,23 @@ async def test_move_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_assignment - ] = mock_object + ] = mock_rpc request = {} await client.move_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9541,22 +9559,23 @@ async def test_update_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_assignment - ] = mock_object + ] = mock_rpc request = {} await client.update_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9946,22 +9965,23 @@ async def test_get_bi_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_bi_reservation - ] = mock_object + ] = mock_rpc request = {} await client.get_bi_reservation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_bi_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10334,22 +10354,23 @@ async def test_update_bi_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_bi_reservation - ] = mock_object + ] = mock_rpc request = {} await client.update_bi_reservation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_bi_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py index d763e39c5d4d..ecb32d617b1e 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
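The test hunks above repeat one mechanical change across every generated async "use_cached_wrapped_rpc" test: the cached wrapper is replaced with a mock.AsyncMock() named mock_rpc rather than mock_object, and its awaited result is pinned with mock_rpc.return_value = mock.Mock() so each call resolves to a plain synchronous Mock. The sketch below strips that pattern down to a self-contained script; the dictionary and the method key are stand-ins for the client's internal _wrapped_methods cache and are not taken from the diff.

import asyncio
from unittest import mock


async def main() -> None:
    # Stand-in for client._client._transport._wrapped_methods in the generated
    # clients; the real cache maps RPC stubs to their retry/timeout wrappers.
    wrapped_methods = {}

    # Swap the cached wrapper for an AsyncMock and pin its awaited result to a
    # plain Mock, mirroring the `mock_rpc.return_value = mock.Mock()` line that
    # the regenerated tests add.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    wrapped_methods["update_capacity_commitment"] = mock_rpc

    request = {}

    # First call goes through the (mocked) cached wrapper exactly once.
    await wrapped_methods["update_capacity_commitment"](request)
    assert mock_rpc.call_count == 1

    # A second call reuses the same cached entry, so only the call count grows;
    # no new wrapper is built.
    await wrapped_methods["update_capacity_commitment"](request)
    assert mock_rpc.call_count == 2


asyncio.run(main())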
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BudgetServiceClient).get_transport_class, type(BudgetServiceClient) - ) + get_transport_class = BudgetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py index 032c3ed2b7f2..2095fbefdf66 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[BudgetServiceTransport], Callable[..., BudgetServiceTransport] ] = ( - type(self).get_transport_class(transport) + BudgetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BudgetServiceTransport], transport) ) diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py index fab157f19282..0c14a2ea58ce 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BudgetServiceClient).get_transport_class, type(BudgetServiceClient) - ) + get_transport_class = BudgetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py index 55b70b6245b2..8407b63a3c79 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[BudgetServiceTransport], Callable[..., BudgetServiceTransport] ] = ( - type(self).get_transport_class(transport) + BudgetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BudgetServiceTransport], transport) ) diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json index 3f2e755b485e..9f6a8ee49f17 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json index 5ef7fcec9913..33b9f5b1220a 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py index cdb8e9652c9c..d62bb8f78e48 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py @@ -1290,22 +1290,23 @@ async def test_create_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_budget - ] = mock_object + ] = mock_rpc request = {} await client.create_budget(request) # Establish that the underlying gRPC stub method was called. 
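The async_client.py hunks above also drop the functools.partial construction of get_transport_class in favor of a plain alias to the sync client's attribute, which removes the need for the functools import. Assuming the transport registry lives on a metaclass, the pattern the generated *ClientMeta classes follow, the two spellings resolve the same transport; the toy classes below only illustrate that equivalence and are not the real GAPIC types.

import functools


class ToyClientMeta(type):
    # Toy registry standing in for the generated *ClientMeta._transport_registry.
    _transport_registry = {
        "grpc": "GrpcTransport",
        "grpc_asyncio": "GrpcAsyncIOTransport",
    }

    def get_transport_class(cls, label=None):
        # Return the registered transport for `label`, or the first one by default.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class ToyClient(metaclass=ToyClientMeta):
    pass


# Old generated form: fetch the function off the metaclass and bind a class by hand.
old_getter = functools.partial(type(ToyClient).get_transport_class, type(ToyClient))

# New generated form: ordinary attribute access already yields a bound method.
new_getter = ToyClient.get_transport_class

assert old_getter("grpc") == new_getter("grpc") == "GrpcTransport"
assert old_getter() == new_getter() == "GrpcTransport"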
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1666,22 +1667,23 @@ async def test_update_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_budget - ] = mock_object + ] = mock_rpc request = {} await client.update_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2044,22 +2046,23 @@ async def test_get_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_budget - ] = mock_object + ] = mock_rpc request = {} await client.get_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2412,22 +2415,23 @@ async def test_list_budgets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_budgets - ] = mock_object + ] = mock_rpc request = {} await client.list_budgets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_budgets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2963,22 +2967,23 @@ async def test_delete_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_budget - ] = mock_object + ] = mock_rpc request = {} await client.delete_budget(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py index 0abcd892dd5c..b61257bd43de 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py @@ -1267,22 +1267,23 @@ async def test_create_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_budget - ] = mock_object + ] = mock_rpc request = {} await client.create_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1553,22 +1554,23 @@ async def test_update_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_budget - ] = mock_object + ] = mock_rpc request = {} await client.update_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1841,22 +1843,23 @@ async def test_get_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_budget - ] = mock_object + ] = mock_rpc request = {} await client.get_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2129,22 +2132,23 @@ async def test_list_budgets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_budgets - ] = mock_object + ] = mock_rpc request = {} await client.list_budgets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_budgets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2598,22 +2602,23 @@ async def test_delete_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_budget - ] = mock_object + ] = mock_rpc request = {} await client.delete_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py index 6748388713a3..558c8aab67c5 100644 --- a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py index 6748388713a3..558c8aab67c5 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py index 382147acd562..f31888507582 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
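Alongside the test and client changes, every regenerated gapic_version.py reverts its pinned release to the "0.0.0" placeholder and the snippet metadata JSON reverts to "0.1.0"; the trailing # {x-release-please-version} marker is what lets the release automation locate these lines and stamp the real version at release time. The snippet below is a hypothetical illustration of that marker-driven substitution, not the release tooling's actual implementation.

import re

GAPIC_VERSION_PY = '__version__ = "0.0.0" # {x-release-please-version}\n'


def stamp_version(text: str, version: str) -> str:
    # Rewrite only version strings on lines carrying the release marker.
    return re.sub(
        r'"(?:\d+\.\d+\.\d+)"(?=.*\{x-release-please-version\})',
        f'"{version}"',
        text,
    )


print(stamp_version(GAPIC_VERSION_PY, "1.14.6"))
# __version__ = "1.14.6" # {x-release-please-version}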
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudBillingClient).get_transport_class, type(CloudBillingClient) - ) + get_transport_class = CloudBillingClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py index 82e7c612e5f5..c31f665736cb 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py @@ -698,7 +698,7 @@ def __init__( transport_init: Union[ Type[CloudBillingTransport], Callable[..., CloudBillingTransport] ] = ( - type(self).get_transport_class(transport) + CloudBillingClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudBillingTransport], transport) ) diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py index ff4ffbbecf79..b76b512fe9d6 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudCatalogClient).get_transport_class, type(CloudCatalogClient) - ) + get_transport_class = CloudCatalogClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py index 36d530d4435d..864deed1c10e 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py @@ -668,7 +668,7 @@ def __init__( transport_init: Union[ Type[CloudCatalogTransport], Callable[..., CloudCatalogTransport] ] = ( - type(self).get_transport_class(transport) + CloudCatalogClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudCatalogTransport], transport) ) diff --git a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json index eea27046c944..94a6ddeb90f9 100644 --- a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json +++ b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing", - "version": "1.13.6" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py 
b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py index 6368685618ec..e81305c3c0d8 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py @@ -1291,22 +1291,23 @@ async def test_get_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.get_billing_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_list_billing_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_billing_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_billing_accounts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_billing_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2224,22 +2226,23 @@ async def test_update_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.update_billing_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2642,22 +2645,23 @@ async def test_create_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.create_billing_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2985,22 +2989,23 @@ async def test_list_project_billing_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_project_billing_info - ] = mock_object + ] = mock_rpc request = {} await client.list_project_billing_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_project_billing_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3586,22 +3591,23 @@ async def test_get_project_billing_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project_billing_info - ] = mock_object + ] = mock_rpc request = {} await client.get_project_billing_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project_billing_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3989,22 +3995,23 @@ async def test_update_project_billing_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_project_billing_info - ] = mock_object + ] = mock_rpc request = {} await client.update_project_billing_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_project_billing_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4383,22 +4390,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4765,22 +4773,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5157,22 +5166,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5587,22 +5597,23 @@ async def test_move_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.move_billing_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py index b7a803137581..ae18f1a72cb5 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py @@ -1263,22 +1263,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,22 +1678,23 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_skus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py index dbdb1c3e69ce..20913d2aee62 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -212,10 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BinauthzManagementServiceV1Client).get_transport_class, - type(BinauthzManagementServiceV1Client), - ) + get_transport_class = BinauthzManagementServiceV1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py index ef855ccedc69..91142cfbeb1e 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py @@ -694,7 +694,7 @@ def __init__( Type[BinauthzManagementServiceV1Transport], Callable[..., BinauthzManagementServiceV1Transport], ] = ( - type(self).get_transport_class(transport) + BinauthzManagementServiceV1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., BinauthzManagementServiceV1Transport], transport diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py index 2438906210b4..609494774d87 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SystemPolicyV1Client).get_transport_class, type(SystemPolicyV1Client) - ) + get_transport_class = SystemPolicyV1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py index afd3a44b101a..592df00e4ec6 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py @@ -654,7 +654,7 @@ def __init__( transport_init: Union[ Type[SystemPolicyV1Transport], Callable[..., SystemPolicyV1Transport] ] = ( - type(self).get_transport_class(transport) + SystemPolicyV1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SystemPolicyV1Transport], transport) ) diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py index 1c302f4e447a..8570c32538c9 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -184,10 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ValidationHelperV1Client).get_transport_class, - type(ValidationHelperV1Client), - ) + get_transport_class = ValidationHelperV1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py index 3916342a1680..bf501c96eb59 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py @@ -642,7 +642,7 @@ def __init__( Type[ValidationHelperV1Transport], Callable[..., ValidationHelperV1Transport], ] = ( - type(self).get_transport_class(transport) + ValidationHelperV1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ValidationHelperV1Transport], transport) ) diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py index 0b31fdffeda5..ebe7a778f8be 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,10 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BinauthzManagementServiceV1Beta1Client).get_transport_class, - type(BinauthzManagementServiceV1Beta1Client), - ) + get_transport_class = BinauthzManagementServiceV1Beta1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py index ca6f0c167713..a2e1b58e9735 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py @@ -701,7 +701,7 @@ def __init__( Type[BinauthzManagementServiceV1Beta1Transport], Callable[..., BinauthzManagementServiceV1Beta1Transport], ] = ( - type(self).get_transport_class(transport) + BinauthzManagementServiceV1Beta1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., BinauthzManagementServiceV1Beta1Transport], transport diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py index 4b813f6005a2..d041cd8db3e8 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,10 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SystemPolicyV1Beta1Client).get_transport_class, - type(SystemPolicyV1Beta1Client), - ) + get_transport_class = SystemPolicyV1Beta1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py index b10b186fb75a..b4df4ff9e7e8 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py @@ -659,7 +659,7 @@ def __init__( Type[SystemPolicyV1Beta1Transport], Callable[..., SystemPolicyV1Beta1Transport], ] = ( - type(self).get_transport_class(transport) + SystemPolicyV1Beta1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SystemPolicyV1Beta1Transport], transport) ) diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json index 622c68895a94..6d7a035dbe2d 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json index 75ffc586f4c9..c60d5267337f 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py index 06cef592aa2a..56f56b95d565 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py @@ -1377,22 +1377,23 @@ async def test_get_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = 
mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1749,22 +1750,23 @@ async def test_update_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2121,22 +2123,23 @@ async def test_create_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_attestor - ] = mock_object + ] = mock_rpc request = {} await client.create_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2506,22 +2509,23 @@ async def test_get_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attestor - ] = mock_object + ] = mock_rpc request = {} await client.get_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2867,22 +2871,23 @@ async def test_update_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attestor - ] = mock_object + ] = mock_rpc request = {} await client.update_attestor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3231,22 +3236,23 @@ async def test_list_attestors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attestors - ] = mock_object + ] = mock_rpc request = {} await client.list_attestors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attestors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3782,22 +3788,23 @@ async def test_delete_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_attestor - ] = mock_object + ] = mock_rpc request = {} await client.delete_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py index 47c93f794dad..b1d37d46380b 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py @@ -1304,22 +1304,23 @@ async def test_get_system_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_system_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_system_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_system_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py index 553869031193..475ff6fdcb0b 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py @@ -1344,22 +1344,23 @@ async def test_validate_attestation_occurrence_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_attestation_occurrence - ] = mock_object + ] = mock_rpc request = {} await client.validate_attestation_occurrence(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_attestation_occurrence(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py index aa245ba21baa..88a803ec0651 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py @@ -1399,22 +1399,23 @@ async def test_get_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1771,22 +1772,23 @@ async def test_update_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2143,22 +2145,23 @@ async def test_create_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_attestor - ] = mock_object + ] = mock_rpc request = {} await client.create_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2528,22 +2531,23 @@ async def test_get_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attestor - ] = mock_object + ] = mock_rpc request = {} await client.get_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2889,22 +2893,23 @@ async def test_update_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attestor - ] = mock_object + ] = mock_rpc request = {} await client.update_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3253,22 +3258,23 @@ async def test_list_attestors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attestors - ] = mock_object + ] = mock_rpc request = {} await client.list_attestors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attestors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3804,22 +3810,23 @@ async def test_delete_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_attestor - ] = mock_object + ] = mock_rpc request = {} await client.delete_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py index cc0892c0e8dd..3a97b5f5de3f 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py @@ -1368,22 +1368,23 @@ async def test_get_system_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_system_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_system_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_system_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py index 8f1bcb93f69d..558c8aab67c5 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.24.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py index 8f1bcb93f69d..558c8aab67c5 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.24.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index 647a6f3c5819..45b787724a85 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,9 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudBuildClient).get_transport_class, type(CloudBuildClient) - ) + get_transport_class = CloudBuildClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 88ac1c97bcbc..2a69fbe66407 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -870,7 +870,7 @@ def __init__( transport_init: Union[ Type[CloudBuildTransport], Callable[..., CloudBuildTransport] ] = ( - type(self).get_transport_class(transport) + CloudBuildClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudBuildTransport], transport) ) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py index 8f1bcb93f69d..558c8aab67c5 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.24.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py index 2c8a90b23aa1..58ba51b37193 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
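# A small sketch of why the functools.partial indirection around
# get_transport_class can be dropped, as in the async_client.py hunks above
# and below. ClientMeta, Client, and AsyncClient are hypothetical stand-ins
# for the generated *ClientMeta metaclass and client classes; the point is
# that both spellings resolve to the same metaclass-level lookup.
import functools


class _GrpcTransport:
    """Placeholder transport class for the sketch."""


class ClientMeta(type):
    # The generated metaclasses keep the transport registry and define
    # get_transport_class on it, so it is reachable through the client class.
    _transport_registry = {"grpc": _GrpcTransport}

    def get_transport_class(cls, label=None):
        return cls._transport_registry[label or "grpc"]


class Client(metaclass=ClientMeta):
    """Stand-in for the sync client, e.g. CloudBuildClient."""


class AsyncClient:
    # Old generated form:
    #   get_transport_class = functools.partial(
    #       type(Client).get_transport_class, type(Client))
    # New generated form, as in this patch: reuse the sync client's method.
    get_transport_class = Client.get_transport_class


old_style = functools.partial(type(Client).get_transport_class, type(Client))
assert old_style("grpc") is AsyncClient.get_transport_class("grpc") is _GrpcTransport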
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RepositoryManagerClient).get_transport_class, type(RepositoryManagerClient) - ) + get_transport_class = RepositoryManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py index ed93324a78ed..8fb9cdfea9af 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py @@ -747,7 +747,7 @@ def __init__( Type[RepositoryManagerTransport], Callable[..., RepositoryManagerTransport], ] = ( - type(self).get_transport_class(transport) + RepositoryManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RepositoryManagerTransport], transport) ) diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index 7a83c7c8d4e3..e379efab560f 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.24.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index 3c49b3ef58be..818d3fc2029c 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.24.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index 87a888133dd9..1135f9395b81 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -1202,8 +1202,9 @@ def test_create_build_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_build(request) @@ -1257,26 +1258,28 @@ async def test_create_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_build - ] = mock_object + ] = mock_rpc request = {} await client.create_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1629,22 +1632,23 @@ async def test_get_build_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_build - ] = mock_object + ] = mock_rpc request = {} await client.get_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1992,22 +1996,23 @@ async def test_list_builds_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_builds - ] = mock_object + ] = mock_rpc request = {} await client.list_builds(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_builds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2556,22 +2561,23 @@ async def test_cancel_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_build - ] = mock_object + ] = mock_rpc request = {} await client.cancel_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2861,8 +2867,9 @@ def test_retry_build_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.retry_build(request) @@ -2916,26 +2923,28 @@ async def test_retry_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retry_build - ] = mock_object + ] = mock_rpc request = {} await client.retry_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.retry_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3200,8 +3209,9 @@ def test_approve_build_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.approve_build(request) @@ -3255,26 +3265,28 @@ async def test_approve_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_build - ] = mock_object + ] = mock_rpc request = {} await client.approve_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.approve_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3645,22 +3657,23 @@ async def test_create_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.create_build_trigger(request) # Establish that the underlying gRPC stub method was called. 
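# A minimal sketch of the lazily built, cached operations client that the
# reworded comments above describe for long-running-operation methods.
# FakeTransport and FakeOperationsClient are hypothetical stand-ins; the
# assumption is that the generated transports expose a similar
# operations_client property constructed on the first LRO call and reused
# afterwards, which is why the tests reset wrapper_fn and then assert
# wrapper_fn.call_count == 0 on the second call.
from unittest import mock


class FakeOperationsClient:
    """Placeholder for the long-running-operations client."""


class FakeTransport:
    def __init__(self, factory):
        self._factory = factory
        self._operations_client = None

    @property
    def operations_client(self):
        # Built on the first operation RPC, then cached and reused.
        if self._operations_client is None:
            self._operations_client = self._factory()
        return self._operations_client


wrapper_fn = mock.Mock(side_effect=FakeOperationsClient)
transport = FakeTransport(wrapper_fn)

first = transport.operations_client   # first access builds and caches
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
second = transport.operations_client  # cache hit; no new wrapper is built
assert wrapper_fn.call_count == 0
assert first is second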
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4052,22 +4065,23 @@ async def test_get_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.get_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4433,22 +4447,23 @@ async def test_list_build_triggers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_build_triggers - ] = mock_object + ] = mock_rpc request = {} await client.list_build_triggers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_build_triggers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4978,22 +4993,23 @@ async def test_delete_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.delete_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5360,22 +5376,23 @@ async def test_update_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.update_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5695,8 +5712,9 @@ def test_run_build_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_build_trigger(request) @@ -5752,26 +5770,28 @@ async def test_run_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.run_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6124,22 +6144,23 @@ async def test_receive_trigger_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.receive_trigger_webhook - ] = mock_object + ] = mock_rpc request = {} await client.receive_trigger_webhook(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.receive_trigger_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6368,8 +6389,9 @@ def test_create_worker_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_worker_pool(request) @@ -6425,26 +6447,28 @@ async def test_create_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6795,22 +6819,23 @@ async def test_get_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7088,8 +7113,9 @@ def test_delete_worker_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_worker_pool(request) @@ -7145,26 +7171,28 @@ async def test_delete_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7433,8 +7461,9 @@ def test_update_worker_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_worker_pool(request) @@ -7490,26 +7519,28 @@ async def test_update_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7854,22 +7885,23 @@ async def test_list_worker_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_worker_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_worker_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_worker_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py index bf87b1ff1fd3..16354f791f6f 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py @@ -1290,8 +1290,9 @@ def test_create_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection(request) @@ -1347,26 +1348,28 @@ async def test_create_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1751,22 +1754,23 @@ async def test_get_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2125,22 +2129,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2635,8 +2640,9 @@ def test_update_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection(request) @@ -2692,26 +2698,28 @@ async def test_update_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3031,8 +3039,9 @@ def test_delete_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection(request) @@ -3088,26 +3097,28 @@ async def test_delete_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3417,8 +3428,9 @@ def test_create_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_repository(request) @@ -3474,26 +3486,28 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3824,8 +3838,9 @@ def test_batch_create_repositories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_repositories(request) @@ -3881,26 +3896,28 @@ async def test_batch_create_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_repositories - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4276,22 +4293,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4660,22 +4678,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5190,8 +5209,9 @@ def test_delete_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_repository(request) @@ -5247,26 +5267,28 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5635,22 +5657,23 @@ async def test_fetch_read_write_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_write_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_write_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_write_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6012,22 +6035,23 @@ async def test_fetch_read_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6391,22 +6415,23 @@ async def test_fetch_linkable_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_linkable_repositories - ] = mock_object + ] = mock_rpc request = {} await client.fetch_linkable_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_linkable_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6884,22 +6909,23 @@ async def test_fetch_git_refs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_refs - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_refs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_refs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py index f1e9cb0c0d05..558c8aab67c5 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py index f1e9cb0c0d05..558c8aab67c5 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py index 3872daefc9ff..5384984359aa 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -260,10 +259,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CertificateManagerClient).get_transport_class, - type(CertificateManagerClient), - ) + get_transport_class = CertificateManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py index 0f1213ccd73a..958164c19471 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py @@ -844,7 +844,7 @@ def __init__( Type[CertificateManagerTransport], Callable[..., CertificateManagerTransport], ] = ( - type(self).get_transport_class(transport) + CertificateManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CertificateManagerTransport], transport) ) diff --git a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json index d66436431ed1..578413176930 100644 --- a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json +++ b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-certificate-manager", - "version": "1.7.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py index 2cea96180b25..1eaad9a4dfa1 100644 --- a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py +++ b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py @@ -1363,22 +1363,23 @@ async def test_list_certificates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificates - ] = 
mock_object + ] = mock_rpc request = {} await client.list_certificates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1954,22 +1955,23 @@ async def test_get_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2283,8 +2285,9 @@ def test_create_certificate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate(request) @@ -2340,26 +2343,28 @@ async def test_create_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2686,8 +2691,9 @@ def test_update_certificate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate(request) @@ -2743,26 +2749,28 @@ async def test_update_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3083,8 +3091,9 @@ def test_delete_certificate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate(request) @@ -3140,26 +3149,28 @@ async def test_delete_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3538,22 +3549,23 @@ async def test_list_certificate_maps_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_maps - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_maps(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_maps(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4132,22 +4144,23 @@ async def test_get_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4466,8 +4479,9 @@ def test_create_certificate_map_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_map(request) @@ -4523,26 +4537,28 @@ async def test_create_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4870,8 +4886,9 @@ def test_update_certificate_map_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_map(request) @@ -4927,26 +4944,28 @@ async def test_update_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5268,8 +5287,9 @@ def test_delete_certificate_map_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_map(request) @@ -5325,26 +5345,28 @@ async def test_delete_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5723,22 +5745,23 @@ async def test_list_certificate_map_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_map_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_map_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_map_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6331,22 +6354,23 @@ async def test_get_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6669,8 +6693,9 @@ def test_create_certificate_map_entry_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_map_entry(request) @@ -6726,26 +6751,28 @@ async def test_create_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7081,8 +7108,9 @@ def test_update_certificate_map_entry_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_map_entry(request) @@ -7138,26 +7166,28 @@ async def test_update_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7487,8 +7517,9 @@ def test_delete_certificate_map_entry_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_map_entry(request) @@ -7544,26 +7575,28 @@ async def test_delete_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7942,22 +7975,23 @@ async def test_list_dns_authorizations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_dns_authorizations - ] = mock_object + ] = mock_rpc request = {} await client.list_dns_authorizations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_dns_authorizations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8545,22 +8579,23 @@ async def test_get_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.get_dns_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8883,8 +8918,9 @@ def test_create_dns_authorization_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_dns_authorization(request) @@ -8940,26 +8976,28 @@ async def test_create_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.create_dns_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9287,8 +9325,9 @@ def test_update_dns_authorization_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_dns_authorization(request) @@ -9344,26 +9383,28 @@ async def test_update_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.update_dns_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9685,8 +9726,9 @@ def test_delete_dns_authorization_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_dns_authorization(request) @@ -9742,26 +9784,28 @@ async def test_delete_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.delete_dns_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10150,22 +10194,23 @@ async def test_list_certificate_issuance_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_issuance_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_issuance_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_issuance_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10774,22 +10819,23 @@ async def test_get_certificate_issuance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_issuance_config - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_issuance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_issuance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11122,8 +11168,9 @@ def test_create_certificate_issuance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_issuance_config(request) @@ -11182,26 +11229,28 @@ async def test_create_certificate_issuance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_issuance_config - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_issuance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_issuance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11552,8 +11601,9 @@ def test_delete_certificate_issuance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_issuance_config(request) @@ -11612,26 +11662,28 @@ async def test_delete_certificate_issuance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_issuance_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_issuance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_issuance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12009,22 +12061,23 @@ async def test_list_trust_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_trust_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_trust_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_trust_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12595,22 +12648,23 @@ async def test_get_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.get_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12919,8 +12973,9 @@ def test_create_trust_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_trust_config(request) @@ -12976,26 +13031,28 @@ async def test_create_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.create_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13322,8 +13379,9 @@ def test_update_trust_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_trust_config(request) @@ -13379,26 +13437,28 @@ async def test_update_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.update_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13721,8 +13781,9 @@ def test_delete_trust_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_trust_config(request) @@ -13778,26 +13839,28 @@ async def test_delete_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py index d413e1807c55..558c8aab67c5 100644 --- a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py index d413e1807c55..558c8aab67c5 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py index 3276a8a353e2..85b64f3501fe 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,10 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudChannelReportsServiceClient).get_transport_class, - type(CloudChannelReportsServiceClient), - ) + get_transport_class = CloudChannelReportsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py index 1abb1598ec2b..94d7c58c7094 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py @@ -694,7 +694,7 @@ def __init__( Type[CloudChannelReportsServiceTransport], Callable[..., CloudChannelReportsServiceTransport], ] = ( - type(self).get_transport_class(transport) + CloudChannelReportsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudChannelReportsServiceTransport], transport) ) diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py index 525d24935ee8..62e7e214d94b 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
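# --- Illustrative sketch (editorial aside, not part of the generated patch) ---
# The async_client.py hunks above drop the functools.partial indirection for
# get_transport_class. Assuming a client whose metaclass defines
# get_transport_class and a transport registry (as the generated clients do),
# both spellings call the same metaclass-defined method and return the same
# transport; _ExampleClientMeta and ExampleClient below are hypothetical
# stand-ins.

from collections import OrderedDict
import functools


class _ExampleClientMeta(type):
    _transport_registry = OrderedDict(grpc="GrpcTransport", rest="RestTransport")

    def get_transport_class(cls, label=None):
        return cls._transport_registry[label or "grpc"]


class ExampleClient(metaclass=_ExampleClientMeta):
    pass


# Old generated form: pre-bind the metaclass method by hand.
old_style = functools.partial(
    type(ExampleClient).get_transport_class, type(ExampleClient)
)
# New generated form: plain attribute access already yields a bound method.
new_style = ExampleClient.get_transport_class

assert old_style("rest") == new_style("rest") == "RestTransport"
# -------------------------------------------------------------------------------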
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -262,10 +261,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudChannelServiceClient).get_transport_class, - type(CloudChannelServiceClient), - ) + get_transport_class = CloudChannelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py index 9a7f970d80fa..1edd0dbbd8a9 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py @@ -873,7 +873,7 @@ def __init__( Type[CloudChannelServiceTransport], Callable[..., CloudChannelServiceTransport], ] = ( - type(self).get_transport_class(transport) + CloudChannelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudChannelServiceTransport], transport) ) diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index 0d2a795960e5..864fdea496be 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-channel", - "version": "1.18.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py index 2f704293a112..623d643f7351 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py @@ -1278,8 +1278,9 @@ def test_run_report_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_report_job(request) @@ -1333,26 +1334,28 @@ async def test_run_report_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_report_job - ] = mock_object + ] = mock_rpc request = {} await client.run_report_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_report_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1630,22 +1633,23 @@ async def test_fetch_report_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_report_results - ] = mock_object + ] = mock_rpc request = {} await client.fetch_report_results(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_report_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2211,22 +2215,23 @@ async def test_list_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index a55d6ca6a8f1..f454d78314e5 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -1331,22 +1331,23 @@ async def test_list_customers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customers - ] = mock_object + ] = mock_rpc request = {} await client.list_customers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1828,22 +1829,23 @@ async def test_get_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer - ] = mock_object + ] = mock_rpc request = {} await client.get_customer(request) # Establish that the underlying gRPC stub method was called. 
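# --- Illustrative sketch (editorial aside, not part of the generated patch) ---
# The reworded comments above say that operation methods build a cached
# client._transport.operations_client via wrapper_fn on the first RPC call and
# reuse it afterwards. A minimal model of that caching behavior, with
# FakeTransport and build_operations_client as hypothetical stand-ins for the
# generated transport and its factory:

from unittest import mock


def build_operations_client():
    """Stand-in for the wrapper_fn that builds the operations client."""
    return object()


class FakeTransport:
    def __init__(self, wrapper_fn=build_operations_client):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        # Built lazily on the first access, then cached and reused.
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


def test_operations_client_is_cached():
    wrapper_fn = mock.Mock(side_effect=build_operations_client)
    transport = FakeTransport(wrapper_fn=wrapper_fn)
    transport.operations_client  # first access builds the helper
    assert wrapper_fn.call_count == 1
    wrapper_fn.reset_mock()
    transport.operations_client  # second access reuses the cached instance
    assert wrapper_fn.call_count == 0
# -------------------------------------------------------------------------------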
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2212,22 +2214,23 @@ async def test_check_cloud_identity_accounts_exist_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_cloud_identity_accounts_exist - ] = mock_object + ] = mock_rpc request = {} await client.check_cloud_identity_accounts_exist(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_cloud_identity_accounts_exist(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2519,22 +2522,23 @@ async def test_create_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_customer - ] = mock_object + ] = mock_rpc request = {} await client.create_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2830,22 +2834,23 @@ async def test_update_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_customer - ] = mock_object + ] = mock_rpc request = {} await client.update_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3117,22 +3122,23 @@ async def test_delete_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_customer - ] = mock_object + ] = mock_rpc request = {} await client.delete_customer(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3503,22 +3509,23 @@ async def test_import_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_customer - ] = mock_object + ] = mock_rpc request = {} await client.import_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3752,8 +3759,9 @@ def test_provision_cloud_identity_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.provision_cloud_identity(request) @@ -3809,26 +3817,28 @@ async def test_provision_cloud_identity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_cloud_identity - ] = mock_object + ] = mock_rpc request = {} await client.provision_cloud_identity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.provision_cloud_identity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4110,22 +4120,23 @@ async def test_list_entitlements_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entitlements - ] = mock_object + ] = mock_rpc request = {} await client.list_entitlements(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entitlements(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4623,22 +4634,23 @@ async def test_list_transferable_skus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transferable_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_transferable_skus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transferable_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5138,22 +5150,23 @@ async def test_list_transferable_offers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transferable_offers - ] = mock_object + ] = mock_rpc request = {} await client.list_transferable_offers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transferable_offers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5653,22 +5666,23 @@ async def test_get_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.get_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5907,8 +5921,9 @@ def test_create_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_entitlement(request) @@ -5964,26 +5979,28 @@ async def test_create_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.create_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6209,8 +6226,9 @@ def test_change_parameters_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.change_parameters(request) @@ -6266,26 +6284,28 @@ async def test_change_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.change_parameters - ] = mock_object + ] = mock_rpc request = {} await client.change_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.change_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6512,8 +6532,9 @@ def test_change_renewal_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.change_renewal_settings(request) @@ -6569,26 +6590,28 @@ async def test_change_renewal_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.change_renewal_settings - ] = mock_object + ] = mock_rpc request = {} await client.change_renewal_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.change_renewal_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6810,8 +6833,9 @@ def test_change_offer_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.change_offer(request) @@ -6865,26 +6889,28 @@ async def test_change_offer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.change_offer - ] = mock_object + ] = mock_rpc request = {} await client.change_offer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.change_offer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7104,8 +7130,9 @@ def test_start_paid_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_paid_service(request) @@ -7161,26 +7188,28 @@ async def test_start_paid_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_paid_service - ] = mock_object + ] = mock_rpc request = {} await client.start_paid_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_paid_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7406,8 +7435,9 @@ def test_suspend_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.suspend_entitlement(request) @@ -7463,26 +7493,28 @@ async def test_suspend_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suspend_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.suspend_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.suspend_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7708,8 +7740,9 @@ def test_cancel_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.cancel_entitlement(request) @@ -7765,26 +7798,28 @@ async def test_cancel_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.cancel_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.cancel_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8010,8 +8045,9 @@ def test_activate_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.activate_entitlement(request) @@ -8067,26 +8103,28 @@ async def test_activate_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.activate_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.activate_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.activate_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8315,8 +8353,9 @@ def test_transfer_entitlements_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.transfer_entitlements(request) @@ -8372,26 +8411,28 @@ async def test_transfer_entitlements_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_entitlements - ] = mock_object + ] = mock_rpc request = {} await client.transfer_entitlements(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.transfer_entitlements(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8618,8 +8659,9 @@ def test_transfer_entitlements_to_google_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.transfer_entitlements_to_google(request) @@ -8675,26 +8717,28 @@ async def test_transfer_entitlements_to_google_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_entitlements_to_google - ] = mock_object + ] = mock_rpc request = {} await client.transfer_entitlements_to_google(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.transfer_entitlements_to_google(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8980,22 +9024,23 @@ async def test_list_channel_partner_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channel_partner_links - ] = mock_object + ] = mock_rpc request = {} await client.list_channel_partner_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channel_partner_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9501,22 +9546,23 @@ async def test_get_channel_partner_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel_partner_link - ] = mock_object + ] = mock_rpc request = {} await client.get_channel_partner_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel_partner_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9822,22 +9868,23 @@ async def test_create_channel_partner_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_partner_link - ] = mock_object + ] = mock_rpc request = {} await client.create_channel_partner_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_channel_partner_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10144,22 +10191,23 @@ async def test_update_channel_partner_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_channel_partner_link - ] = mock_object + ] = mock_rpc request = {} await client.update_channel_partner_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_channel_partner_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10454,22 +10502,23 @@ async def test_get_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.get_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10846,22 +10895,23 @@ async def test_list_customer_repricing_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customer_repricing_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_customer_repricing_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customer_repricing_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11438,22 +11488,23 @@ async def test_create_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.create_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11840,22 +11891,23 @@ async def test_update_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.update_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12229,22 +12281,23 @@ async def test_delete_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12608,22 +12661,23 @@ async def test_get_channel_partner_repricing_config_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.get_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13000,22 +13054,23 @@ async def test_list_channel_partner_repricing_configs_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channel_partner_repricing_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_channel_partner_repricing_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channel_partner_repricing_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13596,22 +13651,23 @@ async def test_create_channel_partner_repricing_config_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.create_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13998,22 +14054,23 @@ async def test_update_channel_partner_repricing_config_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.update_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14387,22 +14444,23 @@ async def test_delete_channel_partner_repricing_config_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14755,22 +14813,23 @@ async def test_list_sku_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sku_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_sku_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sku_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15328,22 +15387,23 @@ async def test_list_sku_group_billable_skus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sku_group_billable_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_sku_group_billable_skus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sku_group_billable_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15910,22 +15970,23 @@ async def test_lookup_offer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_offer - ] = mock_object + ] = mock_rpc request = {} await client.lookup_offer(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_offer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16196,22 +16257,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16612,22 +16674,23 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_skus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17094,22 +17157,23 @@ async def test_list_offers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_offers - ] = mock_object + ] = mock_rpc request = {} await client.list_offers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_offers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17587,22 +17651,23 @@ async def test_list_purchasable_skus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_purchasable_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_purchasable_skus(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_purchasable_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18094,22 +18159,23 @@ async def test_list_purchasable_offers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_purchasable_offers - ] = mock_object + ] = mock_rpc request = {} await client.list_purchasable_offers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_purchasable_offers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18592,22 +18658,23 @@ async def test_query_eligible_billing_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_eligible_billing_accounts - ] = mock_object + ] = mock_rpc request = {} await client.query_eligible_billing_accounts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_eligible_billing_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18892,22 +18959,23 @@ async def test_register_subscriber_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.register_subscriber - ] = mock_object + ] = mock_rpc request = {} await client.register_subscriber(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.register_subscriber(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19195,22 +19263,23 @@ async def test_unregister_subscriber_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unregister_subscriber - ] = mock_object + ] = mock_rpc request = {} await client.unregister_subscriber(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.unregister_subscriber(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19493,22 +19562,23 @@ async def test_list_subscribers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subscribers - ] = mock_object + ] = mock_rpc request = {} await client.list_subscribers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subscribers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19990,22 +20060,23 @@ async def test_list_entitlement_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entitlement_changes - ] = mock_object + ] = mock_rpc request = {} await client.list_entitlement_changes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entitlement_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py index 7ba8aef8e738..569e84b46bce 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,10 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerCoreClient).get_transport_class, - type(CloudControlsPartnerCoreClient), - ) + get_transport_class = CloudControlsPartnerCoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py index e18ed6347c58..82b86d2f3332 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py @@ -798,7 +798,7 @@ def __init__( Type[CloudControlsPartnerCoreTransport], Callable[..., CloudControlsPartnerCoreTransport], ] = ( - type(self).get_transport_class(transport) + CloudControlsPartnerCoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudControlsPartnerCoreTransport], transport) ) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py index 35290d8f376f..c5de6440a7da 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerMonitoringClient).get_transport_class, - type(CloudControlsPartnerMonitoringClient), - ) + get_transport_class = CloudControlsPartnerMonitoringClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py index ef2665fd6df2..0f7d9f952054 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py @@ -688,7 +688,7 @@ def __init__( Type[CloudControlsPartnerMonitoringTransport], Callable[..., CloudControlsPartnerMonitoringTransport], ] = ( - type(self).get_transport_class(transport) + CloudControlsPartnerMonitoringClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CloudControlsPartnerMonitoringTransport], transport diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py index e42485a45d1f..89159b48c9e5 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,10 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerCoreClient).get_transport_class, - type(CloudControlsPartnerCoreClient), - ) + get_transport_class = CloudControlsPartnerCoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py index 039ce6aecbd7..1896fa7459b7 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py @@ -798,7 +798,7 @@ def __init__( Type[CloudControlsPartnerCoreTransport], Callable[..., CloudControlsPartnerCoreTransport], ] = ( - type(self).get_transport_class(transport) + CloudControlsPartnerCoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudControlsPartnerCoreTransport], transport) ) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py index 869bf976e6e1..47889c59fb46 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerMonitoringClient).get_transport_class, - type(CloudControlsPartnerMonitoringClient), - ) + get_transport_class = CloudControlsPartnerMonitoringClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py index 71d16cc912b8..edde6d0911ee 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py @@ -688,7 +688,7 @@ def __init__( Type[CloudControlsPartnerMonitoringTransport], Callable[..., CloudControlsPartnerMonitoringTransport], ] = ( - type(self).get_transport_class(transport) + CloudControlsPartnerMonitoringClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CloudControlsPartnerMonitoringTransport], transport diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 37f6e017d9a3..5a93fc370b33 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index d3df2e6d1061..642805220b5c 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py index 2746ec5e2e42..7401a18fff9e 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py @@ -1388,22 +1388,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # 
Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1776,22 +1777,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,22 +2345,23 @@ async def test_get_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer - ] = mock_object + ] = mock_rpc request = {} await client.get_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2716,22 +2719,23 @@ async def test_list_customers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customers - ] = mock_object + ] = mock_rpc request = {} await client.list_customers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3288,22 +3292,23 @@ async def test_get_ekm_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ekm_connections - ] = mock_object + ] = mock_rpc request = {} await client.get_ekm_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ekm_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3685,22 +3690,23 @@ async def test_get_partner_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner_permissions - ] = mock_object + ] = mock_rpc request = {} await client.get_partner_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4088,22 +4094,23 @@ async def test_list_access_approval_requests_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_access_approval_requests - ] = mock_object + ] = mock_rpc request = {} await client.list_access_approval_requests(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_access_approval_requests(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4687,22 +4694,23 @@ async def test_get_partner_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner - ] = mock_object + ] = mock_rpc request = {} await client.get_partner(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py index 7258a6619007..2f28ff3c4f6b 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py @@ -1389,22 +1389,23 @@ async def test_list_violations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_violations - ] = mock_object + ] = mock_rpc request = {} await client.list_violations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_violations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1964,22 +1965,23 @@ async def test_get_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_violation - ] = mock_object + ] = mock_rpc request = {} await client.get_violation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py index 8370f8fbd38d..012e5a4033b2 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py @@ -1388,22 +1388,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1776,22 +1777,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,22 +2345,23 @@ async def test_get_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer - ] = mock_object + ] = mock_rpc request = {} await client.get_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2716,22 +2719,23 @@ async def test_list_customers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customers - ] = mock_object + ] = mock_rpc request = {} await client.list_customers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3288,22 +3292,23 @@ async def test_get_ekm_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ekm_connections - ] = mock_object + ] = mock_rpc request = {} await client.get_ekm_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ekm_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3685,22 +3690,23 @@ async def test_get_partner_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner_permissions - ] = mock_object + ] = mock_rpc request = {} await client.get_partner_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4088,22 +4094,23 @@ async def test_list_access_approval_requests_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_access_approval_requests - ] = mock_object + ] = mock_rpc request = {} await client.list_access_approval_requests(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_access_approval_requests(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4687,22 +4694,23 @@ async def test_get_partner_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner - ] = mock_object + ] = mock_rpc request = {} await client.get_partner(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py index 5a1de7e85211..7d6ab9fce47f 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py @@ -1389,22 +1389,23 @@ async def test_list_violations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_violations - ] = mock_object + ] = mock_rpc request = {} await client.list_violations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_violations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1964,22 +1965,23 @@ async def test_get_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_violation - ] = mock_object + ] = mock_rpc request = {} await client.get_violation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py index fef354c48230..80a6fb9730a7 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudQuotasClient).get_transport_class, type(CloudQuotasClient) - ) + get_transport_class = CloudQuotasClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py index 376a965778b5..81a992db5567 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py @@ -692,7 +692,7 @@ def __init__( transport_init: Union[ Type[CloudQuotasTransport], Callable[..., CloudQuotasTransport] ] = ( - type(self).get_transport_class(transport) + CloudQuotasClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudQuotasTransport], transport) ) diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json b/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json index 33a66763fc72..5c9889ca79cf 100644 --- a/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-quotas", - "version": "0.1.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py index fceb1992e9ae..4988c6aec8cf 100644 --- a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py +++ b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py @@ -1251,22 +1251,23 @@ async def test_list_quota_infos_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_quota_infos - ] = mock_object + ] = mock_rpc request = {} await client.list_quota_infos(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_quota_infos(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1848,22 +1849,23 @@ async def test_get_quota_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_quota_info - ] = mock_object + ] = mock_rpc request = {} await client.get_quota_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_quota_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2256,22 +2258,23 @@ async def test_list_quota_preferences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_quota_preferences - ] = mock_object + ] = mock_rpc request = {} await client.list_quota_preferences(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_quota_preferences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2865,22 +2868,23 @@ async def test_get_quota_preference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_quota_preference - ] = mock_object + ] = mock_rpc request = {} await client.get_quota_preference(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_quota_preference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3284,22 +3288,23 @@ async def test_create_quota_preference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_quota_preference - ] = mock_object + ] = mock_rpc request = {} await client.create_quota_preference(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_quota_preference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3718,22 +3723,23 @@ async def test_update_quota_preference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_quota_preference - ] = mock_object + ] = mock_rpc request = {} await client.update_quota_preference(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_quota_preference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py index cf5493b86bbc..558c8aab67c5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py index cf5493b86bbc..558c8aab67c5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py index 34170fd7ad29..ba83a537babe 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,10 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConsumerProcurementServiceClient).get_transport_class, - type(ConsumerProcurementServiceClient), - ) + get_transport_class = ConsumerProcurementServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py index 3999e726c4b8..be7ec242cec1 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[ConsumerProcurementServiceTransport], Callable[..., ConsumerProcurementServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConsumerProcurementServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConsumerProcurementServiceTransport], transport) ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py index cf5493b86bbc..558c8aab67c5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py index 31816f320933..2c5fdff4be5d 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
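
The async_client.py hunks above (and the matching cloudquotas change earlier) drop the functools import and replace functools.partial(type(Client).get_transport_class, type(Client)) with a plain Client.get_transport_class. Both spellings end up invoking the same method with a class bound as its first argument; a sketch of that equivalence with a hypothetical ExampleClientMeta/ExampleClient pair standing in for the generated metaclass and client (not the generated code itself):

    import functools

    class ExampleClientMeta(type):
        # Hypothetical stand-in for a generated ClientMeta metaclass; the
        # registry lives here, so either binding below can resolve it.
        _transport_registry = {"grpc": "GrpcTransport", "rest": "RestTransport"}

        def get_transport_class(cls, label=None):
            return cls._transport_registry[label or "grpc"]

    class ExampleClient(metaclass=ExampleClientMeta):
        pass

    # Old shape, as removed by the patch: fetch the method from the metaclass
    # and re-bind it by hand (first argument becomes the metaclass).
    old_style = functools.partial(
        type(ExampleClient).get_transport_class, type(ExampleClient)
    )

    # New shape, as added by the patch: plain attribute access already yields
    # a method bound to the client class.
    new_style = ExampleClient.get_transport_class

    assert old_style("rest") == new_style("rest") == "RestTransport"
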
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,10 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConsumerProcurementServiceClient).get_transport_class, - type(ConsumerProcurementServiceClient), - ) + get_transport_class = ConsumerProcurementServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py index b9e8b61dc93e..2d84edd718bd 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[ConsumerProcurementServiceTransport], Callable[..., ConsumerProcurementServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConsumerProcurementServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConsumerProcurementServiceTransport], transport) ) diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json index a8da078691cb..08bfac2f3eb3 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.7" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json index 6fe6a56b1e43..2b8ed5a003a6 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.7" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py index 907df7d5cdfa..be7a8fe9c772 100644 --- 
a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py @@ -1327,8 +1327,9 @@ def test_place_order_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.place_order(request) @@ -1382,26 +1383,28 @@ async def test_place_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.place_order - ] = mock_object + ] = mock_rpc request = {} await client.place_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.place_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1669,22 +1672,23 @@ async def test_get_order_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_order - ] = mock_object + ] = mock_rpc request = {} await client.get_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2037,22 +2041,23 @@ async def test_list_orders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_orders - ] = mock_object + ] = mock_rpc request = {} await client.list_orders(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_orders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py index b1d81f550141..a2b4b10250ac 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py @@ -1327,8 +1327,9 @@ def test_place_order_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.place_order(request) @@ -1382,26 +1383,28 @@ async def test_place_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.place_order - ] = mock_object + ] = mock_rpc request = {} await client.place_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.place_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1669,22 +1672,23 @@ async def test_get_order_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_order - ] = mock_object + ] = mock_rpc request = {} await client.get_order(request) # Establish that the underlying gRPC stub method was called. 
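
The rewritten comments in the place_order hunks above spell out what those tests actually verify: operation methods call wrapper_fn to build a cached client._transport.operations_client on the first RPC and reuse it afterwards. A small sketch of that lazily-cached-attribute shape, with a hypothetical FakeTransport and builder in place of the generated code:

    from unittest import mock

    class FakeTransport:
        # Hypothetical transport whose operations client is built lazily and
        # cached on first access.
        def __init__(self, builder):
            self._builder = builder
            self._operations_client = None

        @property
        def operations_client(self):
            if self._operations_client is None:
                self._operations_client = self._builder()
            return self._operations_client

    builder = mock.Mock(return_value=object())
    transport = FakeTransport(builder)

    transport.operations_client      # first access builds the client
    builder.reset_mock()             # same idea as wrapper_fn.reset_mock()

    transport.operations_client      # later accesses hit the cache
    assert builder.call_count == 0
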
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2037,22 +2041,23 @@ async def test_list_orders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_orders - ] = mock_object + ] = mock_rpc request = {} await client.list_orders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_orders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-common/google/cloud/common/gapic_version.py b/packages/google-cloud-common/google/cloud/common/gapic_version.py index 5dea85083756..558c8aab67c5 100644 --- a/packages/google-cloud-common/google/cloud/common/gapic_version.py +++ b/packages/google-cloud-common/google/cloud/common/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.3.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py index 7a6b2c884e03..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py index 7a6b2c884e03..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
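
The gapic_version.py hunks above reset the checked-in version to "0.0.0"; the trailing # {x-release-please-version} marker is what lets the release tooling stamp the real version back onto that line at release time. A toy illustration of that kind of placeholder substitution (an assumption about the general mechanism, not the tool's actual implementation):

    import re

    line = '__version__ = "0.0.0"  # {x-release-please-version}'

    def stamp_version(text: str, version: str) -> str:
        # Rewrite the quoted value on any line carrying the placeholder marker.
        return re.sub(
            r'"[^"]*"(?=.*\{x-release-please-version\})',
            f'"{version}"',
            text,
        )

    assert stamp_version(line, "1.2.3") == (
        '__version__ = "1.2.3"  # {x-release-please-version}'
    )
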
# -__version__ = "1.19.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py index 3fc7d1c43658..e3c490fde1e6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py @@ -643,7 +643,7 @@ def __init__( Type[AcceleratorTypesTransport], Callable[..., AcceleratorTypesTransport], ] = ( - type(self).get_transport_class(transport) + AcceleratorTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AcceleratorTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py index 3f04fe8f8cf4..bd522fdd409c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[AddressesTransport], Callable[..., AddressesTransport] ] = ( - type(self).get_transport_class(transport) + AddressesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AddressesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py index ef8959793963..654de7d186ed 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[AutoscalersTransport], Callable[..., AutoscalersTransport] ] = ( - type(self).get_transport_class(transport) + AutoscalersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoscalersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py index 4de581727f4f..8a244f67edab 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py @@ -640,7 +640,7 @@ def __init__( transport_init: Union[ Type[BackendBucketsTransport], Callable[..., BackendBucketsTransport] ] = ( - type(self).get_transport_class(transport) + BackendBucketsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BackendBucketsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py index 5cd689f386fc..8f6aa412de12 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py @@ -642,7 +642,7 @@ def 
__init__( transport_init: Union[ Type[BackendServicesTransport], Callable[..., BackendServicesTransport] ] = ( - type(self).get_transport_class(transport) + BackendServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BackendServicesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py index 08d1809ce71b..3d2aa700494a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[DiskTypesTransport], Callable[..., DiskTypesTransport] ] = ( - type(self).get_transport_class(transport) + DiskTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiskTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py index b24be76d31c5..d8f3e5164750 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[DisksTransport], Callable[..., DisksTransport] ] = ( - type(self).get_transport_class(transport) + DisksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DisksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py index 6e4c804f13cf..173f2d4d5e5c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py @@ -645,7 +645,7 @@ def __init__( Type[ExternalVpnGatewaysTransport], Callable[..., ExternalVpnGatewaysTransport], ] = ( - type(self).get_transport_class(transport) + ExternalVpnGatewaysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ExternalVpnGatewaysTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py index abd41c10a277..f5c93138a301 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py @@ -643,7 +643,7 @@ def __init__( Type[FirewallPoliciesTransport], Callable[..., FirewallPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + FirewallPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FirewallPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py index a38a0d219b5e..a25be623c57b 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[FirewallsTransport], Callable[..., FirewallsTransport] ] = ( - type(self).get_transport_class(transport) + FirewallsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FirewallsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py index 1f5c84f1165d..479f226c6089 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[ForwardingRulesTransport], Callable[..., ForwardingRulesTransport] ] = ( - type(self).get_transport_class(transport) + ForwardingRulesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ForwardingRulesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py index 050317243dfd..aa361a0639af 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[GlobalAddressesTransport], Callable[..., GlobalAddressesTransport] ] = ( - type(self).get_transport_class(transport) + GlobalAddressesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GlobalAddressesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py index 1a410d04c750..73fac29acac7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py @@ -647,7 +647,7 @@ def __init__( Type[GlobalForwardingRulesTransport], Callable[..., GlobalForwardingRulesTransport], ] = ( - type(self).get_transport_class(transport) + GlobalForwardingRulesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GlobalForwardingRulesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py index 35d214e6ba5f..9c3968a6f6d0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -651,7 +651,7 @@ def __init__( Type[GlobalNetworkEndpointGroupsTransport], Callable[..., GlobalNetworkEndpointGroupsTransport], ] = ( - type(self).get_transport_class(transport) + 
GlobalNetworkEndpointGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., GlobalNetworkEndpointGroupsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py index e1c0f53c1b56..582f90608011 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py @@ -640,7 +640,7 @@ def __init__( Type[GlobalOperationsTransport], Callable[..., GlobalOperationsTransport], ] = ( - type(self).get_transport_class(transport) + GlobalOperationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GlobalOperationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py index a173eec0d1d9..917163e5dde7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -650,7 +650,7 @@ def __init__( Type[GlobalOrganizationOperationsTransport], Callable[..., GlobalOrganizationOperationsTransport], ] = ( - type(self).get_transport_class(transport) + GlobalOrganizationOperationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., GlobalOrganizationOperationsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py index e65f6c3cbeb0..31baf252244e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py @@ -655,7 +655,7 @@ def __init__( Type[GlobalPublicDelegatedPrefixesTransport], Callable[..., GlobalPublicDelegatedPrefixesTransport], ] = ( - type(self).get_transport_class(transport) + GlobalPublicDelegatedPrefixesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., GlobalPublicDelegatedPrefixesTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py index 5fb579f953e1..390ca315950a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[HealthChecksTransport], Callable[..., HealthChecksTransport] ] = ( - type(self).get_transport_class(transport) + HealthChecksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., HealthChecksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py index 29aa1952847b..a92d0d7c5fe2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py @@ -639,7 +639,7 @@ def __init__( Type[ImageFamilyViewsTransport], Callable[..., ImageFamilyViewsTransport], ] = ( - type(self).get_transport_class(transport) + ImageFamilyViewsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageFamilyViewsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py index 087baf56acd7..28351f58c4eb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[ImagesTransport], Callable[..., ImagesTransport] ] = ( - type(self).get_transport_class(transport) + ImagesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImagesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py index d564a2844b4a..721dce6bb8a2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py @@ -664,7 +664,7 @@ def __init__( Type[InstanceGroupManagerResizeRequestsTransport], Callable[..., InstanceGroupManagerResizeRequestsTransport], ] = ( - type(self).get_transport_class(transport) + InstanceGroupManagerResizeRequestsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., InstanceGroupManagerResizeRequestsTransport], diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py index 3fd4f3f6dafb..d65cb6c12b37 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -647,7 +647,7 @@ def __init__( Type[InstanceGroupManagersTransport], Callable[..., InstanceGroupManagersTransport], ] = ( - type(self).get_transport_class(transport) + InstanceGroupManagersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceGroupManagersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py index 06a208f662a5..9d4f7dea8bd7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py @@ -640,7 +640,7 @@ def __init__( transport_init: Union[ 
Type[InstanceGroupsTransport], Callable[..., InstanceGroupsTransport] ] = ( - type(self).get_transport_class(transport) + InstanceGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceGroupsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py index 20a54c242f81..6b2aa28abee9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[InstanceSettingsServiceTransport], Callable[..., InstanceSettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + InstanceSettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceSettingsServiceTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py index 8294ff277b29..aab4c02a23b2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py @@ -645,7 +645,7 @@ def __init__( Type[InstanceTemplatesTransport], Callable[..., InstanceTemplatesTransport], ] = ( - type(self).get_transport_class(transport) + InstanceTemplatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceTemplatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py index 57ac5bc40363..489e7e33bbec 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[InstancesTransport], Callable[..., InstancesTransport] ] = ( - type(self).get_transport_class(transport) + InstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstancesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py index 5340dce4c40e..64699398706e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py @@ -643,7 +643,7 @@ def __init__( Type[InstantSnapshotsTransport], Callable[..., InstantSnapshotsTransport], ] = ( - type(self).get_transport_class(transport) + InstantSnapshotsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstantSnapshotsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py index 663603537759..253f65fefb4b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py @@ -649,7 +649,7 @@ def __init__( Type[InterconnectAttachmentsTransport], Callable[..., InterconnectAttachmentsTransport], ] = ( - type(self).get_transport_class(transport) + InterconnectAttachmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InterconnectAttachmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py index 05372b3bddb9..4c1955436e94 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py @@ -644,7 +644,7 @@ def __init__( Type[InterconnectLocationsTransport], Callable[..., InterconnectLocationsTransport], ] = ( - type(self).get_transport_class(transport) + InterconnectLocationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InterconnectLocationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py index f4158a224965..de0a24175bb3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py @@ -648,7 +648,7 @@ def __init__( Type[InterconnectRemoteLocationsTransport], Callable[..., InterconnectRemoteLocationsTransport], ] = ( - type(self).get_transport_class(transport) + InterconnectRemoteLocationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., InterconnectRemoteLocationsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py index fce47fdba9be..aaceb51c97e0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[InterconnectsTransport], Callable[..., InterconnectsTransport] ] = ( - type(self).get_transport_class(transport) + InterconnectsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InterconnectsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py index 6a2003e8c5bd..19791d56e0c1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py @@ -631,7 +631,7 @@ 
def __init__( transport_init: Union[ Type[LicenseCodesTransport], Callable[..., LicenseCodesTransport] ] = ( - type(self).get_transport_class(transport) + LicenseCodesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LicenseCodesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py index af21a808ff1f..1e391cd879eb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[LicensesTransport], Callable[..., LicensesTransport] ] = ( - type(self).get_transport_class(transport) + LicensesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LicensesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py index 694ab8c0658f..d8263f56e910 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[MachineImagesTransport], Callable[..., MachineImagesTransport] ] = ( - type(self).get_transport_class(transport) + MachineImagesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MachineImagesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py index 2cc3d99ace0a..76b729111007 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[MachineTypesTransport], Callable[..., MachineTypesTransport] ] = ( - type(self).get_transport_class(transport) + MachineTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MachineTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py index afc68a604f24..2e218d73ffe9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py @@ -645,7 +645,7 @@ def __init__( Type[NetworkAttachmentsTransport], Callable[..., NetworkAttachmentsTransport], ] = ( - type(self).get_transport_class(transport) + NetworkAttachmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkAttachmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py index 
4619eff25e1c..491d38581d21 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py @@ -651,7 +651,7 @@ def __init__( Type[NetworkEdgeSecurityServicesTransport], Callable[..., NetworkEdgeSecurityServicesTransport], ] = ( - type(self).get_transport_class(transport) + NetworkEdgeSecurityServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., NetworkEdgeSecurityServicesTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py index f3fe32270087..5dd8c4475ead 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -647,7 +647,7 @@ def __init__( Type[NetworkEndpointGroupsTransport], Callable[..., NetworkEndpointGroupsTransport], ] = ( - type(self).get_transport_class(transport) + NetworkEndpointGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkEndpointGroupsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py index 46d70f359c55..2ce5915f843f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py @@ -649,7 +649,7 @@ def __init__( Type[NetworkFirewallPoliciesTransport], Callable[..., NetworkFirewallPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + NetworkFirewallPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkFirewallPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py index 44729db252f0..2d6c6fdcca83 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[NetworksTransport], Callable[..., NetworksTransport] ] = ( - type(self).get_transport_class(transport) + NetworksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py index b33790a602d8..abfa417c23ea 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[NodeGroupsTransport], Callable[..., NodeGroupsTransport] ] = ( - type(self).get_transport_class(transport) + 
NodeGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeGroupsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py index e45c491e2789..a37de9001145 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[NodeTemplatesTransport], Callable[..., NodeTemplatesTransport] ] = ( - type(self).get_transport_class(transport) + NodeTemplatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeTemplatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py index 6bfc6ff6e2a0..5d20d96caba5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[NodeTypesTransport], Callable[..., NodeTypesTransport] ] = ( - type(self).get_transport_class(transport) + NodeTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py index 926c95f56744..8f3aae82b89e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py @@ -643,7 +643,7 @@ def __init__( Type[PacketMirroringsTransport], Callable[..., PacketMirroringsTransport], ] = ( - type(self).get_transport_class(transport) + PacketMirroringsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PacketMirroringsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py index df3c01084b7f..3cd34365bdb5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[ProjectsTransport], Callable[..., ProjectsTransport] ] = ( - type(self).get_transport_class(transport) + ProjectsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProjectsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py index 21e6317ca4bc..f988a2b8c93b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -649,7 +649,7 @@ def __init__( Type[PublicAdvertisedPrefixesTransport], Callable[..., PublicAdvertisedPrefixesTransport], ] = ( - type(self).get_transport_class(transport) + PublicAdvertisedPrefixesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PublicAdvertisedPrefixesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py index ade92d4e6428..82989f868234 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -649,7 +649,7 @@ def __init__( Type[PublicDelegatedPrefixesTransport], Callable[..., PublicDelegatedPrefixesTransport], ] = ( - type(self).get_transport_class(transport) + PublicDelegatedPrefixesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PublicDelegatedPrefixesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py index 5ea80bcbbe37..1fc50b748039 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionAutoscalersTransport], Callable[..., RegionAutoscalersTransport], ] = ( - type(self).get_transport_class(transport) + RegionAutoscalersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionAutoscalersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py index 6e7b7af27151..76de8cad59dc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionBackendServicesTransport], Callable[..., RegionBackendServicesTransport], ] = ( - type(self).get_transport_class(transport) + RegionBackendServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionBackendServicesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py index 351ad29d66c6..1df1123fcdb5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionCommitmentsTransport], Callable[..., RegionCommitmentsTransport], ] = ( - type(self).get_transport_class(transport) + RegionCommitmentsClient.get_transport_class(transport) if isinstance(transport, str) or 
transport is None else cast(Callable[..., RegionCommitmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py index 68f6a78d57f3..e985fbb9e895 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[RegionDiskTypesTransport], Callable[..., RegionDiskTypesTransport] ] = ( - type(self).get_transport_class(transport) + RegionDiskTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionDiskTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py index c9dbd47f72fd..6f97ae0f0868 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[RegionDisksTransport], Callable[..., RegionDisksTransport] ] = ( - type(self).get_transport_class(transport) + RegionDisksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionDisksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py index 0a2b95d11b4d..a26bcfd452d9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionHealthCheckServicesTransport], Callable[..., RegionHealthCheckServicesTransport], ] = ( - type(self).get_transport_class(transport) + RegionHealthCheckServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionHealthCheckServicesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py index 2fcbea43d2af..30fea9ff210a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionHealthChecksTransport], Callable[..., RegionHealthChecksTransport], ] = ( - type(self).get_transport_class(transport) + RegionHealthChecksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionHealthChecksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py index a90d5db315ab..ce6498488bf9 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -651,7 +651,7 @@ def __init__( Type[RegionInstanceGroupManagersTransport], Callable[..., RegionInstanceGroupManagersTransport], ] = ( - type(self).get_transport_class(transport) + RegionInstanceGroupManagersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionInstanceGroupManagersTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py index 5b55c3dea81b..aacea1576000 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionInstanceGroupsTransport], Callable[..., RegionInstanceGroupsTransport], ] = ( - type(self).get_transport_class(transport) + RegionInstanceGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstanceGroupsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py index 18738fb19cf1..911bd2c6ae22 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionInstanceTemplatesTransport], Callable[..., RegionInstanceTemplatesTransport], ] = ( - type(self).get_transport_class(transport) + RegionInstanceTemplatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstanceTemplatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py index eccd921bf254..b9838ead41aa 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[RegionInstancesTransport], Callable[..., RegionInstancesTransport] ] = ( - type(self).get_transport_class(transport) + RegionInstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstancesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py index ef18ad964f96..5f2772163b6a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionInstantSnapshotsTransport], Callable[..., 
RegionInstantSnapshotsTransport], ] = ( - type(self).get_transport_class(transport) + RegionInstantSnapshotsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstantSnapshotsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py index 448c184d7608..ae10133ac5df 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -651,7 +651,7 @@ def __init__( Type[RegionNetworkEndpointGroupsTransport], Callable[..., RegionNetworkEndpointGroupsTransport], ] = ( - type(self).get_transport_class(transport) + RegionNetworkEndpointGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionNetworkEndpointGroupsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py index 2ea39512745b..9ef32491766d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py @@ -655,7 +655,7 @@ def __init__( Type[RegionNetworkFirewallPoliciesTransport], Callable[..., RegionNetworkFirewallPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + RegionNetworkFirewallPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionNetworkFirewallPoliciesTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py index eb68db77a265..8f9b353ce1dd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py @@ -651,7 +651,7 @@ def __init__( Type[RegionNotificationEndpointsTransport], Callable[..., RegionNotificationEndpointsTransport], ] = ( - type(self).get_transport_class(transport) + RegionNotificationEndpointsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionNotificationEndpointsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py index b1650a94e7fe..9afa7640b4ae 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py @@ -640,7 +640,7 @@ def __init__( Type[RegionOperationsTransport], Callable[..., RegionOperationsTransport], ] = ( - type(self).get_transport_class(transport) + RegionOperationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else 
cast(Callable[..., RegionOperationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py index 1384c918a89e..64cd68517767 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionSecurityPoliciesTransport], Callable[..., RegionSecurityPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + RegionSecurityPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionSecurityPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py index dc4c7c1737f0..601228014ab6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionSslCertificatesTransport], Callable[..., RegionSslCertificatesTransport], ] = ( - type(self).get_transport_class(transport) + RegionSslCertificatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionSslCertificatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py index 1ceb3f71ffc3..3dad39ce1e01 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionSslPoliciesTransport], Callable[..., RegionSslPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + RegionSslPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionSslPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py index 74b34dc7fb94..4fa02950c9ee 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionTargetHttpProxiesTransport], Callable[..., RegionTargetHttpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + RegionTargetHttpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionTargetHttpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py index e4ad3653da7c..a0a67bf5464d 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionTargetHttpsProxiesTransport], Callable[..., RegionTargetHttpsProxiesTransport], ] = ( - type(self).get_transport_class(transport) + RegionTargetHttpsProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionTargetHttpsProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py index b72e1de9e5ff..401ea84bb7f3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionTargetTcpProxiesTransport], Callable[..., RegionTargetTcpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + RegionTargetTcpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionTargetTcpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py index 16da5d9623ea..af510a30bb48 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[RegionUrlMapsTransport], Callable[..., RegionUrlMapsTransport] ] = ( - type(self).get_transport_class(transport) + RegionUrlMapsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionUrlMapsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py index 8d026c83045d..d011854631b2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[RegionZonesTransport], Callable[..., RegionZonesTransport] ] = ( - type(self).get_transport_class(transport) + RegionZonesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionZonesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py index 4dfd0ed86bad..9502b764063d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[RegionsTransport], Callable[..., RegionsTransport] ] = ( - type(self).get_transport_class(transport) + RegionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None 
else cast(Callable[..., RegionsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py index 5c21ebe9f5cf..712408dd11c0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[ReservationsTransport], Callable[..., ReservationsTransport] ] = ( - type(self).get_transport_class(transport) + ReservationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReservationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py index e0450bc23f06..15544bdf9cbd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py @@ -643,7 +643,7 @@ def __init__( Type[ResourcePoliciesTransport], Callable[..., ResourcePoliciesTransport], ] = ( - type(self).get_transport_class(transport) + ResourcePoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ResourcePoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py index 281943a40358..16244812f736 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[RoutersTransport], Callable[..., RoutersTransport] ] = ( - type(self).get_transport_class(transport) + RoutersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RoutersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py index 61d007166218..4a180fbf1b7f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[RoutesTransport], Callable[..., RoutesTransport] ] = ( - type(self).get_transport_class(transport) + RoutesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RoutesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py index 5a7260c8ce7d..9866a67f1ee7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py @@ -643,7 +643,7 @@ def __init__( Type[SecurityPoliciesTransport], Callable[..., SecurityPoliciesTransport], ] = ( - 
type(self).get_transport_class(transport) + SecurityPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecurityPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py index a62bb7bd1dae..ed32f20eb629 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py @@ -645,7 +645,7 @@ def __init__( Type[ServiceAttachmentsTransport], Callable[..., ServiceAttachmentsTransport], ] = ( - type(self).get_transport_class(transport) + ServiceAttachmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceAttachmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py index a25ad4685a7a..96038398783e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[SnapshotSettingsServiceTransport], Callable[..., SnapshotSettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + SnapshotSettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SnapshotSettingsServiceTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py index d0db20882eb6..48ac2b521d06 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[SnapshotsTransport], Callable[..., SnapshotsTransport] ] = ( - type(self).get_transport_class(transport) + SnapshotsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SnapshotsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py index dae0cbcde444..9f8d79a499c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[SslCertificatesTransport], Callable[..., SslCertificatesTransport] ] = ( - type(self).get_transport_class(transport) + SslCertificatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SslCertificatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py index e0ee783f6e80..5a0b7f483a54 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[SslPoliciesTransport], Callable[..., SslPoliciesTransport] ] = ( - type(self).get_transport_class(transport) + SslPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SslPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py index 6f3717271b10..66c919a57704 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py @@ -640,7 +640,7 @@ def __init__( Type[StoragePoolTypesTransport], Callable[..., StoragePoolTypesTransport], ] = ( - type(self).get_transport_class(transport) + StoragePoolTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StoragePoolTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py index a19b357d97ef..e70dc879e944 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[StoragePoolsTransport], Callable[..., StoragePoolsTransport] ] = ( - type(self).get_transport_class(transport) + StoragePoolsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StoragePoolsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py index b3b2ba865ccd..2ffc259804cf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[SubnetworksTransport], Callable[..., SubnetworksTransport] ] = ( - type(self).get_transport_class(transport) + SubnetworksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SubnetworksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py index fe20cd7b20b7..021e2c228f58 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetGrpcProxiesTransport], Callable[..., TargetGrpcProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetGrpcProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetGrpcProxiesTransport], transport) ) diff 
--git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py index 2c046c5447d4..524ef81a25da 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetHttpProxiesTransport], Callable[..., TargetHttpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetHttpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetHttpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py index 1d471011d9b6..5fc74cea1db9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetHttpsProxiesTransport], Callable[..., TargetHttpsProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetHttpsProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetHttpsProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py index c605f9b64f62..2575ea47aa79 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[TargetInstancesTransport], Callable[..., TargetInstancesTransport] ] = ( - type(self).get_transport_class(transport) + TargetInstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetInstancesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py index c90e842c2f03..b575ba53b5cf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[TargetPoolsTransport], Callable[..., TargetPoolsTransport] ] = ( - type(self).get_transport_class(transport) + TargetPoolsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetPoolsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py index 223e70bd3023..ea783bb9a2fa 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py @@ -643,7 +643,7 @@ def __init__( 
Type[TargetSslProxiesTransport], Callable[..., TargetSslProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetSslProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetSslProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py index 3b06ab98861b..ed1563d2f327 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py @@ -643,7 +643,7 @@ def __init__( Type[TargetTcpProxiesTransport], Callable[..., TargetTcpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetTcpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetTcpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py index 17c732e6074c..a9c6cb8df839 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetVpnGatewaysTransport], Callable[..., TargetVpnGatewaysTransport], ] = ( - type(self).get_transport_class(transport) + TargetVpnGatewaysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetVpnGatewaysTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py index 453f4be8cdb0..9692ef9e40dd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[UrlMapsTransport], Callable[..., UrlMapsTransport] ] = ( - type(self).get_transport_class(transport) + UrlMapsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UrlMapsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py index 4dbea329de10..a3db517a80c0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[VpnGatewaysTransport], Callable[..., VpnGatewaysTransport] ] = ( - type(self).get_transport_class(transport) + VpnGatewaysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VpnGatewaysTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py index 6fe30460d897..86a982810b11 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[VpnTunnelsTransport], Callable[..., VpnTunnelsTransport] ] = ( - type(self).get_transport_class(transport) + VpnTunnelsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VpnTunnelsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py index 6b139371feaa..8d59625da496 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py @@ -637,7 +637,7 @@ def __init__( transport_init: Union[ Type[ZoneOperationsTransport], Callable[..., ZoneOperationsTransport] ] = ( - type(self).get_transport_class(transport) + ZoneOperationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ZoneOperationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py index 22464582ed16..6d712df2de77 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[ZonesTransport], Callable[..., ZonesTransport] ] = ( - type(self).get_transport_class(transport) + ZonesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ZonesTransport], transport) ) diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index bdfae0e308c3..8a5a8ea62778 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-compute", - "version": "1.19.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py index a228e2a68655..42eed01a661d 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,10 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConfidentialComputingClient).get_transport_class, - type(ConfidentialComputingClient), - ) + get_transport_class = ConfidentialComputingClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py index 405aa8b04120..2dc8aec9e96a 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py @@ -670,7 +670,7 @@ def __init__( Type[ConfidentialComputingTransport], Callable[..., ConfidentialComputingTransport], ] = ( - type(self).get_transport_class(transport) + ConfidentialComputingClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfidentialComputingTransport], transport) ) diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json index 1f70f8007495..957efb1f23a6 100644 --- a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-confidentialcomputing", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py index d3399c09bd4a..88b58dbe84ec 100644 --- a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py @@ -1361,22 +1361,23 @@ async def test_create_challenge_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_challenge - ] = mock_object + ] = mock_rpc request = {} await client.create_challenge(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_challenge(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1747,22 +1748,23 @@ async def test_verify_attestation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.verify_attestation - ] = mock_object + ] = mock_rpc request = {} await client.verify_attestation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.verify_attestation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-config/google/cloud/config/gapic_version.py b/packages/google-cloud-config/google/cloud/config/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-cloud-config/google/cloud/config/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py index 65290c0e7ea0..3bd01b92b89d 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConfigClient).get_transport_class, type(ConfigClient) - ) + get_transport_class = ConfigClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py index 791c5295ea52..2a763afcd0ff 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -808,7 +808,7 @@ def __init__( transport_init: Union[ Type[ConfigTransport], Callable[..., ConfigTransport] ] = ( - type(self).get_transport_class(transport) + ConfigClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfigTransport], transport) ) diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index f3c226df045e..772dccf47120 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-config", - "version": "0.1.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index c7134800a41c..c3d4c7bcbe0a 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -1243,22 +1243,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1848,22 +1849,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2194,8 +2196,9 @@ def test_create_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_deployment(request) @@ -2251,26 +2254,28 @@ async def test_create_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2618,8 +2623,9 @@ def test_update_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_deployment(request) @@ -2675,26 +2681,28 @@ async def test_update_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3034,8 +3042,9 @@ def test_delete_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_deployment(request) @@ -3091,26 +3100,28 @@ async def test_delete_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_deployment - ] = mock_object + ] = mock_rpc request = {} await client.delete_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3475,22 +3486,23 @@ async def test_list_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_revisions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4079,22 +4091,23 @@ async def test_get_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_revision - ] = mock_object + ] = mock_rpc request = {} await client.get_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4475,22 +4488,23 @@ async def test_get_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_resource - ] = mock_object + ] = mock_rpc request = {} await client.get_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4848,22 +4862,23 @@ async def test_list_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_resources - ] = mock_object + ] = mock_rpc request = {} await client.list_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5421,22 +5436,23 @@ async def test_export_deployment_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_deployment_statefile - ] = mock_object + ] = mock_rpc request = {} await client.export_deployment_statefile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_deployment_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5721,22 +5737,23 @@ async def test_export_revision_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_revision_statefile - ] = mock_object + ] = mock_rpc request = {} await client.export_revision_statefile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_revision_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6009,22 +6026,23 @@ async def test_import_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_statefile - ] = mock_object + ] = mock_rpc request = {} await client.import_statefile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6374,22 +6392,23 @@ async def test_delete_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_statefile - ] = mock_object + ] = mock_rpc request = {} await client.delete_statefile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6673,8 +6692,9 @@ def test_lock_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.lock_deployment(request) @@ -6728,26 +6748,28 @@ async def test_lock_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lock_deployment - ] = mock_object + ] = mock_rpc request = {} await client.lock_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.lock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7045,8 +7067,9 @@ def test_unlock_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.unlock_deployment(request) @@ -7102,26 +7125,28 @@ async def test_unlock_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unlock_deployment - ] = mock_object + ] = mock_rpc request = {} await client.unlock_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.unlock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7501,22 +7526,23 @@ async def test_export_lock_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_lock_info - ] = mock_object + ] = mock_rpc request = {} await client.export_lock_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_lock_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7815,8 +7841,9 @@ def test_create_preview_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_preview(request) @@ -7870,26 +7897,28 @@ async def test_create_preview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_preview - ] = mock_object + ] = mock_rpc request = {} await client.create_preview(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8301,22 +8330,23 @@ async def test_get_preview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_preview - ] = mock_object + ] = mock_rpc request = {} await client.get_preview(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8694,22 +8724,23 @@ async def test_list_previews_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_previews - ] = mock_object + ] = mock_rpc request = {} await client.list_previews(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_previews(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9198,8 +9229,9 @@ def test_delete_preview_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_preview(request) @@ -9253,26 +9285,28 @@ async def test_delete_preview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_preview - ] = mock_object + ] = mock_rpc request = {} await client.delete_preview(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9626,22 +9660,23 @@ async def test_export_preview_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_preview_result - ] = mock_object + ] = mock_rpc request = {} await client.export_preview_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_preview_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9933,22 +9968,23 @@ async def test_list_terraform_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_terraform_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_terraform_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_terraform_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10527,22 +10563,23 @@ async def test_get_terraform_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_terraform_version - ] = mock_object + ] = mock_rpc request = {} await client.get_terraform_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_terraform_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py index 1a7fb072f786..558c8aab67c5 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py index 1a7fb072f786..558c8aab67c5 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py index 40795f94b8ff..3e0bb0884cb2 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -236,10 +235,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContactCenterInsightsClient).get_transport_class, - type(ContactCenterInsightsClient), - ) + get_transport_class = ContactCenterInsightsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index e1e7b70918e6..5a264ad26d88 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -885,7 +885,7 @@ def __init__( Type[ContactCenterInsightsTransport], Callable[..., ContactCenterInsightsTransport], ] = ( - type(self).get_transport_class(transport) + ContactCenterInsightsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContactCenterInsightsTransport], transport) ) diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index 8a7deccb9d04..bb09ad8db4c2 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contact-center-insights", - "version": "1.17.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index ab8a670da609..962ffb010308 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -1397,22 +1397,23 @@ async def test_create_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1770,8 +1771,9 @@ def test_upload_conversation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upload_conversation(request) @@ -1827,26 +1829,28 @@ async def test_upload_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upload_conversation - ] = mock_object + ] = mock_rpc request = {} await client.upload_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upload_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2140,22 +2144,23 @@ async def test_update_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversation - ] = mock_object + ] = mock_rpc request = {} await client.update_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2564,22 +2569,23 @@ async def test_get_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2955,22 +2961,23 @@ async def test_list_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3537,22 +3544,23 @@ async def test_delete_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3847,8 +3855,9 @@ def test_create_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_analysis(request) @@ -3902,26 +3911,28 @@ async def test_create_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_analysis - ] = mock_object + ] = mock_rpc request = {} await client.create_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4278,22 +4289,23 @@ async def test_get_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_analysis - ] = mock_object + ] = mock_rpc request = {} await client.get_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4643,22 +4655,23 @@ async def test_list_analyses_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_analyses - ] = mock_object + ] = mock_rpc request = {} await client.list_analyses(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_analyses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5195,22 +5208,23 @@ async def test_delete_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_analysis - ] = mock_object + ] = mock_rpc request = {} await client.delete_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5508,8 +5522,9 @@ def test_bulk_analyze_conversations_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.bulk_analyze_conversations(request) @@ -5565,26 +5580,28 @@ async def test_bulk_analyze_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.bulk_analyze_conversations - ] = mock_object + ] = mock_rpc request = {} await client.bulk_analyze_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.bulk_analyze_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5918,8 +5935,9 @@ def test_bulk_delete_conversations_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.bulk_delete_conversations(request) @@ -5975,26 +5993,28 @@ async def test_bulk_delete_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.bulk_delete_conversations - ] = mock_object + ] = mock_rpc request = {} await client.bulk_delete_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.bulk_delete_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6315,8 +6335,9 @@ def test_ingest_conversations_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.ingest_conversations(request) @@ -6372,26 +6393,28 @@ async def test_ingest_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.ingest_conversations - ] = mock_object + ] = mock_rpc request = {} await client.ingest_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.ingest_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6706,8 +6729,9 @@ def test_export_insights_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_insights_data(request) @@ -6763,26 +6787,28 @@ async def test_export_insights_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_insights_data - ] = mock_object + ] = mock_rpc request = {} await client.export_insights_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_insights_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7093,8 +7119,9 @@ def test_create_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_issue_model(request) @@ -7150,26 +7177,28 @@ async def test_create_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.create_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7559,22 +7588,23 @@ async def test_update_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.update_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7969,22 +7999,23 @@ async def test_get_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.get_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8349,22 +8380,23 @@ async def test_list_issue_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_issue_models - ] = mock_object + ] = mock_rpc request = {} await client.list_issue_models(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_issue_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8675,8 +8707,9 @@ def test_delete_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_issue_model(request) @@ -8732,26 +8765,28 @@ async def test_delete_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9062,8 +9097,9 @@ def test_deploy_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_issue_model(request) @@ -9119,26 +9155,28 @@ async def test_deploy_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.deploy_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9449,8 +9487,9 @@ def test_undeploy_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_issue_model(request) @@ -9506,26 +9545,28 @@ async def test_undeploy_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9886,22 +9927,23 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_issue - ] = mock_object + ] = mock_rpc request = {} await client.get_issue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10246,22 +10288,23 @@ async def test_list_issues_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_issues - ] = mock_object + ] = mock_rpc request = {} await client.list_issues(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10610,22 +10653,23 @@ async def test_update_issue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_issue - ] = mock_object + ] = mock_rpc request = {} await client.update_issue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10978,22 +11022,23 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_issue - ] = mock_object + ] = mock_rpc request = {} await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11344,22 +11389,23 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.calculate_issue_model_stats - ] = mock_object + ] = mock_rpc request = {} await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11749,22 +11795,23 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_phrase_matcher - ] = mock_object + ] = mock_rpc request = {} await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12176,22 +12223,23 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_phrase_matcher - ] = mock_object + ] = mock_rpc request = {} await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12579,22 +12627,23 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_phrase_matchers - ] = mock_object + ] = mock_rpc request = {} await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13162,22 +13211,23 @@ async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_phrase_matcher - ] = mock_object + ] = mock_rpc request = {} await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13555,22 +13605,23 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_phrase_matcher - ] = mock_object + ] = mock_rpc request = {} await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13957,22 +14008,23 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.calculate_stats - ] = mock_object + ] = mock_rpc request = {} await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14327,22 +14379,23 @@ async def test_get_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14689,22 +14742,23 @@ async def test_update_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15068,22 +15122,23 @@ async def test_create_view_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_view - ] = mock_object + ] = mock_rpc request = {} await client.create_view(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15447,22 +15502,23 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_view - ] = mock_object + ] = mock_rpc request = {} await client.get_view(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15811,22 +15867,23 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_views - ] = mock_object + ] = mock_rpc request = {} await client.list_views(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_views(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16372,22 +16429,23 @@ async def test_update_view_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_view - ] = mock_object + ] = mock_rpc request = {} await client.update_view(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16740,22 +16798,23 @@ async def test_delete_view_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_view - ] = mock_object + ] = mock_rpc request = {} await client.delete_view(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 39ada5514ff0..558c8aab67c5 100644 --- a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.50.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 39ada5514ff0..558c8aab67c5 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.50.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py index 9518929e7c09..2b7a0e1a642a 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClusterManagerClient).get_transport_class, type(ClusterManagerClient) - ) + get_transport_class = ClusterManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py index 6367cf1b7183..3dea67a3b36f 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[ClusterManagerTransport], Callable[..., ClusterManagerTransport] ] = ( - type(self).get_transport_class(transport) + ClusterManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClusterManagerTransport], transport) ) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 39ada5514ff0..558c8aab67c5 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.50.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py index e30f0bf1ef76..699b9b86cdac 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClusterManagerClient).get_transport_class, type(ClusterManagerClient) - ) + get_transport_class = ClusterManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py index cd7a5750ed0a..13487da71d47 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py @@ -655,7 +655,7 @@ def __init__( transport_init: Union[ Type[ClusterManagerTransport], Callable[..., ClusterManagerTransport] ] = ( - type(self).get_transport_class(transport) + ClusterManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClusterManagerTransport], transport) ) diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index e1284a608c4e..2a343a6434fa 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.50.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index 17b37ede76ea..afdb562b267e 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.50.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py index ed4966a33353..3e87b9113ba1 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py @@ -1290,22 +1290,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1773,22 +1774,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2263,22 +2265,23 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2713,22 +2716,23 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3199,22 +3203,23 @@ async def test_update_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_node_pool(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,22 +3557,23 @@ async def test_set_node_pool_autoscaling_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_autoscaling - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_autoscaling(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_autoscaling(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3911,22 +3917,23 @@ async def test_set_logging_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_logging_service - ] = mock_object + ] = mock_rpc request = {} await client.set_logging_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_logging_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4397,22 +4404,23 @@ async def test_set_monitoring_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_monitoring_service - ] = mock_object + ] = mock_rpc request = {} await client.set_monitoring_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_monitoring_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4878,22 +4886,23 @@ async def test_set_addons_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_addons_config - ] = mock_object + ] = mock_rpc request = {} await client.set_addons_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_addons_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5360,22 +5369,23 @@ async def test_set_locations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_locations - ] = mock_object + ] = mock_rpc request = {} await client.set_locations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_locations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5822,22 +5832,23 @@ async def test_update_master_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_master - ] = mock_object + ] = mock_rpc request = {} await client.update_master(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_master(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6282,22 +6293,23 @@ async def test_set_master_auth_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_master_auth - ] = mock_object + ] = mock_rpc request = {} await client.set_master_auth(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_master_auth(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6620,22 +6632,23 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7038,22 +7051,23 @@ async def test_list_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7458,22 +7472,23 @@ async def test_get_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_operation - ] = mock_object + ] = mock_rpc request = {} await client.get_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7873,22 +7888,23 @@ async def test_cancel_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_operation - ] = mock_object + ] = mock_rpc request = {} await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8284,22 +8300,23 @@ async def test_get_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_server_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8691,22 +8708,23 @@ async def test_get_json_web_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_json_web_keys - ] = mock_object + ] = mock_rpc request = {} await client.get_json_web_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_json_web_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8977,22 +8995,23 @@ async def test_list_node_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_node_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_node_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_node_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9403,22 +9422,23 @@ async def test_get_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9863,22 +9883,23 @@ async def test_create_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_node_pool(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10327,22 +10348,23 @@ async def test_delete_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10757,22 +10779,23 @@ async def test_complete_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.complete_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11090,22 +11113,23 @@ async def test_rollback_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.rollback_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11576,22 +11600,23 @@ async def test_set_node_pool_management_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_management - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_management(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_management(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11921,22 +11946,23 @@ async def test_set_labels_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_labels - ] = mock_object + ] = mock_rpc request = {} await client.set_labels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12259,22 +12285,23 @@ async def test_set_legacy_abac_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_legacy_abac - ] = mock_object + ] = mock_rpc request = {} await client.set_legacy_abac(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_legacy_abac(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12729,22 +12756,23 @@ async def test_start_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.start_ip_rotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13201,22 +13229,23 @@ async def test_complete_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.complete_ip_rotation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13676,22 +13705,23 @@ async def test_set_node_pool_size_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_size - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_size(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_size(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14032,22 +14062,23 @@ async def test_set_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_network_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14528,22 +14559,23 @@ async def test_set_maintenance_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_maintenance_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_maintenance_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_maintenance_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15016,22 +15048,23 @@ async def test_list_usable_subnetworks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_subnetworks - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_subnetworks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_subnetworks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15520,22 +15553,23 @@ async def test_check_autopilot_compatibility_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_autopilot_compatibility - ] = mock_object + ] = mock_rpc request = {} await client.check_autopilot_compatibility(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_autopilot_compatibility(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py index dcdc6664a778..da6c520022d0 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py @@ -1267,22 +1267,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1746,22 +1747,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2230,22 +2232,23 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2670,22 +2673,23 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3146,22 +3150,23 @@ async def test_update_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3499,22 +3504,23 @@ async def test_set_node_pool_autoscaling_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_autoscaling - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_autoscaling(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_autoscaling(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3858,22 +3864,23 @@ async def test_set_logging_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_logging_service - ] = mock_object + ] = mock_rpc request = {} await client.set_logging_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_logging_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4334,22 +4341,23 @@ async def test_set_monitoring_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_monitoring_service - ] = mock_object + ] = mock_rpc request = {} await client.set_monitoring_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_monitoring_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4805,22 +4813,23 @@ async def test_set_addons_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_addons_config - ] = mock_object + ] = mock_rpc request = {} await client.set_addons_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_addons_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5277,22 +5286,23 @@ async def test_set_locations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_locations - ] = mock_object + ] = mock_rpc request = {} await client.set_locations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_locations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5729,22 +5739,23 @@ async def test_update_master_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_master - ] = mock_object + ] = mock_rpc request = {} await client.update_master(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_master(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6179,22 +6190,23 @@ async def test_set_master_auth_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_master_auth - ] = mock_object + ] = mock_rpc request = {} await client.set_master_auth(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_master_auth(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6517,22 +6529,23 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6925,22 +6938,23 @@ async def test_list_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7335,22 +7349,23 @@ async def test_get_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_operation - ] = mock_object + ] = mock_rpc request = {} await client.get_operation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7740,22 +7755,23 @@ async def test_cancel_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_operation - ] = mock_object + ] = mock_rpc request = {} await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8141,22 +8157,23 @@ async def test_get_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_server_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8538,22 +8555,23 @@ async def test_get_json_web_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_json_web_keys - ] = mock_object + ] = mock_rpc request = {} await client.get_json_web_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_json_web_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8824,22 +8842,23 @@ async def test_list_node_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_node_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_node_pools(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_node_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9240,22 +9259,23 @@ async def test_get_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9690,22 +9710,23 @@ async def test_create_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10144,22 +10165,23 @@ async def test_delete_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10564,22 +10586,23 @@ async def test_complete_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.complete_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10897,22 +10920,23 @@ async def test_rollback_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.rollback_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11373,22 +11397,23 @@ async def test_set_node_pool_management_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_management - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_management(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_management(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11844,22 +11869,23 @@ async def test_set_labels_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_labels - ] = mock_object + ] = mock_rpc request = {} await client.set_labels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12304,22 +12330,23 @@ async def test_set_legacy_abac_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_legacy_abac - ] = mock_object + ] = mock_rpc request = {} await client.set_legacy_abac(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_legacy_abac(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12764,22 +12791,23 @@ async def test_start_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.start_ip_rotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13226,22 +13254,23 @@ async def test_complete_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.complete_ip_rotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13691,22 +13720,23 @@ async def test_set_node_pool_size_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_size - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_size(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_size(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14047,22 +14077,23 @@ async def test_set_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_network_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14533,22 +14564,23 @@ async def test_set_maintenance_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_maintenance_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_maintenance_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_maintenance_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15011,22 +15043,23 @@ async def test_list_usable_subnetworks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_subnetworks - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_subnetworks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_subnetworks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15601,22 +15634,23 @@ async def test_check_autopilot_compatibility_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_autopilot_compatibility - ] = mock_object + ] = mock_rpc request = {} await client.check_autopilot_compatibility(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_autopilot_compatibility(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15891,22 +15925,23 @@ async def test_list_locations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_locations - ] = mock_object + ] = mock_rpc request = {} await client.list_locations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_locations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py index c99be268ce75..558c8aab67c5 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py index c99be268ce75..558c8aab67c5 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py index 3afffcc31462..7ae843dbce3d 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContainerAnalysisClient).get_transport_class, type(ContainerAnalysisClient) - ) + get_transport_class = ContainerAnalysisClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py index f547b55537f2..3b0dec1774db 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py @@ -662,7 +662,7 @@ def __init__( Type[ContainerAnalysisTransport], Callable[..., ContainerAnalysisTransport], ] = ( - type(self).get_transport_class(transport) + ContainerAnalysisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContainerAnalysisTransport], transport) ) diff --git a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json index 21bd0118a95d..dab3e1ef2940 100644 --- a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json +++ b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-containeranalysis", - "version": "2.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py index 08ab1ffe97ee..8852b7b1d541 100644 --- a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py +++ b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py @@ -1329,22 +1329,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1712,22 +1713,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2103,22 +2105,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2517,22 +2520,23 @@ async def test_get_vulnerability_occurrences_summary_async_use_cached_wrapped_rp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vulnerability_occurrences_summary - ] = mock_object + ] = mock_rpc request = {} await client.get_vulnerability_occurrences_summary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vulnerability_occurrences_summary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py index 35a7786db3a7..558c8aab67c5 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.7.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py index 35a7786db3a7..558c8aab67c5 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py index f344385b9816..3c6831dc21f0 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,10 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentLinkServiceClient).get_transport_class, - type(DocumentLinkServiceClient), - ) + get_transport_class = DocumentLinkServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py index 75e842e51274..d3046a9d7f24 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py @@ -695,7 +695,7 @@ def __init__( Type[DocumentLinkServiceTransport], Callable[..., DocumentLinkServiceTransport], ] = ( - type(self).get_transport_class(transport) + DocumentLinkServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentLinkServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py index a6de5aa870ce..db68dbe4838c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,10 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentSchemaServiceClient).get_transport_class, - type(DocumentSchemaServiceClient), - ) + get_transport_class = DocumentSchemaServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py index e09f1fefc82d..2ec3ff29b93c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py @@ -691,7 +691,7 @@ def __init__( Type[DocumentSchemaServiceTransport], Callable[..., DocumentSchemaServiceTransport], ] = ( - type(self).get_transport_class(transport) + DocumentSchemaServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentSchemaServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py index 471cbd4892a7..ff1753f3d097 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentServiceClient).get_transport_class, type(DocumentServiceClient) - ) + get_transport_class = DocumentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py index a67244cd0062..49814fe14345 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py @@ -712,7 +712,7 @@ def __init__( transport_init: Union[ Type[DocumentServiceTransport], Callable[..., DocumentServiceTransport] ] = ( - type(self).get_transport_class(transport) + DocumentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py index af737f1f1c39..22669269e196 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PipelineServiceClient).get_transport_class, type(PipelineServiceClient) - ) + get_transport_class = PipelineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py index f4f365323074..ef5b9440ab85 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py @@ -682,7 +682,7 @@ def __init__( transport_init: Union[ Type[PipelineServiceTransport], Callable[..., PipelineServiceTransport] ] = ( - type(self).get_transport_class(transport) + PipelineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PipelineServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py index cf1c549e42f6..7c13c495543a 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RuleSetServiceClient).get_transport_class, type(RuleSetServiceClient) - ) + get_transport_class = RuleSetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py index 45d311236f7a..df37e447f79d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[RuleSetServiceTransport], Callable[..., RuleSetServiceTransport] ] = ( - type(self).get_transport_class(transport) + RuleSetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RuleSetServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py index d47996577184..666fd9a062ff 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SynonymSetServiceClient).get_transport_class, type(SynonymSetServiceClient) - ) + get_transport_class = SynonymSetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py index 32812db3afc9..61e3bdc10a0c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py @@ -687,7 +687,7 @@ def __init__( Type[SynonymSetServiceTransport], Callable[..., SynonymSetServiceTransport], ] = ( - type(self).get_transport_class(transport) + SynonymSetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SynonymSetServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json index 6f7185201f9c..4e798f93791e 100644 --- a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contentwarehouse", - "version": "0.7.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py index c85e3d67f810..1f996a128646 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py @@ -1368,22 +1368,23 @@ async def test_list_linked_targets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_linked_targets - ] = mock_object + ] = mock_rpc request = {} await client.list_linked_targets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_linked_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1757,22 +1758,23 @@ async def test_list_linked_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_linked_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_linked_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_linked_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2352,22 +2354,23 @@ async def test_create_document_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document_link - ] = mock_object + ] = mock_rpc request = {} await client.create_document_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2746,22 +2749,23 @@ async def test_delete_document_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_document_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py index 6918e5a30c6b..80a7753d7452 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py @@ -1379,22 +1379,23 @@ async def test_create_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_document_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1792,22 +1793,23 @@ async def test_update_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_document_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2204,22 +2206,23 @@ async def test_get_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_document_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,22 +2594,23 @@ async def test_delete_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_document_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2972,22 +2976,23 @@ async def test_list_document_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_document_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_document_schemas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_document_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py index 87bf8ec6620f..5a92f71043cf 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py @@ -1326,22 +1326,23 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1747,22 +1748,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2140,22 +2142,23 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2505,22 +2508,23 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2873,22 +2877,23 @@ async def test_search_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_documents - ] = mock_object + ] = mock_rpc request = {} await client.search_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3493,22 +3498,23 @@ async def test_lock_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lock_document - ] = mock_object + ] = mock_rpc request = {} await client.lock_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lock_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3884,22 +3890,23 @@ async def test_fetch_acl_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_acl - ] = mock_object + ] = mock_rpc request = {} await client.fetch_acl(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_acl(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4239,22 +4246,23 @@ async def test_set_acl_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_acl - ] = mock_object + ] = mock_rpc request = {} await client.set_acl(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_acl(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py index 12669e2313b3..e87f4adba870 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py @@ -1260,8 +1260,9 @@ def test_run_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_pipeline(request) @@ -1315,26 +1316,28 @@ async def test_run_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.run_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py index 22aba996ea70..cc15a93ecd1d 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py @@ -1294,22 +1294,23 @@ async def test_create_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.create_rule_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1675,22 +1676,23 @@ async def test_get_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.get_rule_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2046,22 +2048,23 @@ async def test_update_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.update_rule_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2414,22 +2417,23 @@ async def test_delete_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_rule_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2772,22 +2776,23 @@ async def test_list_rule_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_rule_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_rule_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_rule_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py index 451c74e46bf7..06ca74439156 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py @@ -1338,22 +1338,23 @@ async def test_create_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.create_synonym_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1728,22 +1729,23 @@ async def test_get_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.get_synonym_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2110,22 +2112,23 @@ async def test_update_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.update_synonym_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2502,22 +2505,23 @@ async def test_delete_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_synonym_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2880,22 +2884,23 @@ async def test_list_synonym_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_synonym_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_synonym_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_synonym_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 95fb211319a62f021866a5a6cd103553e282d8c0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 22:53:46 +0000 Subject: [PATCH 002/108] feat: [google-cloud-translate] Add BigQuery Metastore Partition Service API version v1alpha (#13006) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 662212485 Source-Link: https://github.com/googleapis/googleapis/commit/456a812fbc03ef50e253dc85f2b2c22a8af96d36 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ec266e6da03208a76b0fd6001ba7df93dae44e6 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRyYW5zbGF0ZS8uT3dsQm90LnlhbWwiLCJoIjoiMmVjMjY2ZTZkYTAzMjA4YTc2YjBmZDYwMDFiYTdkZjkzZGFlNDRlNiJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/cloud/translate/gapic_version.py | 2 +- .../cloud/translate_v3/gapic_version.py | 2 +- .../translation_service/async_client.py | 6 +- .../services/translation_service/client.py | 2 +- .../translate_v3/types/automl_translation.py | 2 +- .../cloud/translate_v3beta1/gapic_version.py | 2 +- .../translation_service/async_client.py | 6 +- .../services/translation_service/client.py | 2 +- ..._metadata_google.cloud.translation.v3.json | 2 +- ...data_google.cloud.translation.v3beta1.json | 2 +- .../translate_v3/test_translation_service.py | 452 ++++++++++-------- .../test_translation_service.py | 130 ++--- 12 files changed, 340 insertions(+), 270 deletions(-) diff --git a/packages/google-cloud-translate/google/cloud/translate/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate/gapic_version.py index dd91ea26f1b4..558c8aab67c5 100644 --- a/packages/google-cloud-translate/google/cloud/translate/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.16.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py index dd91ea26f1b4..558c8aab67c5 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.16.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py index 8de8df0a3b2a..13093b977dfd 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -228,10 +227,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TranslationServiceClient).get_transport_class, - type(TranslationServiceClient), - ) + get_transport_class = TranslationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py index 94c80ddde495..99d9caa1c2f4 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py @@ -844,7 +844,7 @@ def __init__( Type[TranslationServiceTransport], Callable[..., TranslationServiceTransport], ] = ( - type(self).get_transport_class(transport) + TranslationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TranslationServiceTransport], transport) ) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py index 87aeb42cbb01..a70177d6a016 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/automl_translation.py @@ -491,7 +491,7 @@ class Example(proto.Message): Attributes: name (str): Output only. The resource name of the example, in form of - \`projects/{project-number-or-id}/locations/{location_id}/datasets/{dataset_id}/examples/{example_id}' + ``projects/{project-number-or-id}/locations/{location_id}/datasets/{dataset_id}/examples/{example_id}`` source_text (str): Sentence in source language. target_text (str): diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py index dd91ea26f1b4..558c8aab67c5 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.16.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py index 17c2f265955a..90e6d6aedc1b 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,10 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TranslationServiceClient).get_transport_class, - type(TranslationServiceClient), - ) + get_transport_class = TranslationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py index 82bb47e1785d..aa798b42656c 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py @@ -669,7 +669,7 @@ def __init__( Type[TranslationServiceTransport], Callable[..., TranslationServiceTransport], ] = ( - type(self).get_transport_class(transport) + TranslationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TranslationServiceTransport], transport) ) diff --git a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json index 4558cd4c7300..b2041c071341 100644 --- a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json +++ b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-translate", - "version": "3.16.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json index 2f9b6b3678f3..42d10a5032f6 100644 --- a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json +++ b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-translate", - "version": "3.16.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py index afa790ddb24f..809eb41a5f8e 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py @@ -1351,22 +1351,23 @@ async def test_translate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.translate_text - ] = mock_object + ] = mock_rpc request = {} await client.translate_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.translate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1760,22 +1761,23 @@ async def test_romanize_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.romanize_text - ] = mock_object + ] = mock_rpc request = {} await client.romanize_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.romanize_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2133,22 +2135,23 @@ async def test_detect_language_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detect_language - ] = mock_object + ] = mock_rpc request = {} await client.detect_language(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detect_language(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2533,22 +2536,23 @@ async def test_get_supported_languages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_supported_languages - ] = mock_object + ] = mock_rpc request = {} await client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_supported_languages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2945,22 +2949,23 @@ async def test_translate_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.translate_document - ] = mock_object + ] = mock_rpc request = {} await client.translate_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3190,8 +3195,9 @@ def test_batch_translate_text_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
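# A standalone, hedged rendering of the async test pattern above: the cached
# wrapped RPC is replaced by an AsyncMock whose awaited result is a plain Mock,
# and the stub's call count is checked after each call. The dict and the
# "translate_text" key are placeholders standing in for
# client._client._transport._wrapped_methods.
import asyncio
from unittest import mock


async def _demo() -> None:
    wrapped_methods = {}
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaiting the stub yields a sync Mock
    wrapped_methods["translate_text"] = mock_rpc

    await wrapped_methods["translate_text"]({})  # first call hits the stub
    assert mock_rpc.call_count == 1

    await wrapped_methods["translate_text"]({})  # second call hits it again
    assert mock_rpc.call_count == 2


asyncio.run(_demo())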
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_translate_text(request) @@ -3247,26 +3253,28 @@ async def test_batch_translate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_translate_text - ] = mock_object + ] = mock_rpc request = {} await client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_translate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3496,8 +3504,9 @@ def test_batch_translate_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_translate_document(request) @@ -3553,26 +3562,28 @@ async def test_batch_translate_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_translate_document - ] = mock_object + ] = mock_rpc request = {} await client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3969,8 +3980,9 @@ def test_create_glossary_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_glossary(request) @@ -4024,26 +4036,28 @@ async def test_create_glossary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_glossary - ] = mock_object + ] = mock_rpc request = {} await client.create_glossary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4340,8 +4354,9 @@ def test_update_glossary_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_glossary(request) @@ -4395,26 +4410,28 @@ async def test_update_glossary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_glossary - ] = mock_object + ] = mock_rpc request = {} await client.update_glossary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4775,22 +4792,23 @@ async def test_list_glossaries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_glossaries - ] = mock_object + ] = mock_rpc request = {} await client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. 
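# An illustration, under assumed names (not GAPIC internals), of the
# "build once, then reuse" behaviour the updated comments describe for
# operation methods: wrapper_fn runs only when the cached operations client is
# first needed, and resetting the mock afterwards shows that later calls
# create no new wrapper.
from unittest import mock


class _Transport:
    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        if self._operations_client is None:  # first LRO call builds it
            self._operations_client = self._wrapper_fn()
        return self._operations_client  # later calls reuse the cached instance


wrapper_fn = mock.Mock(return_value=object())
transport = _Transport(wrapper_fn)

first = transport.operations_client  # wrapper_fn called exactly once here
wrapper_fn.reset_mock()              # mirror the tests: reset after first call
second = transport.operations_client

assert wrapper_fn.call_count == 0    # no new wrapper was created
assert first is second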
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_glossaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5340,22 +5358,23 @@ async def test_get_glossary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_glossary - ] = mock_object + ] = mock_rpc request = {} await client.get_glossary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5652,8 +5671,9 @@ def test_delete_glossary_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_glossary(request) @@ -5707,26 +5727,28 @@ async def test_delete_glossary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_glossary - ] = mock_object + ] = mock_rpc request = {} await client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6088,22 +6110,23 @@ async def test_get_glossary_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_glossary_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_glossary_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6480,22 +6503,23 @@ async def test_list_glossary_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_glossary_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_glossary_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_glossary_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7073,22 +7097,23 @@ async def test_create_glossary_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_glossary_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_glossary_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7472,22 +7497,23 @@ async def test_update_glossary_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_glossary_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_glossary_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7855,22 +7881,23 @@ async def test_delete_glossary_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_glossary_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_glossary_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_glossary_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8165,8 +8192,9 @@ def test_create_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_dataset(request) @@ -8220,26 +8248,28 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8617,22 +8647,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8997,22 +9028,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9499,8 +9531,9 @@ def test_delete_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_dataset(request) @@ -9554,26 +9587,28 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9945,22 +9980,23 @@ async def test_create_adaptive_mt_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_adaptive_mt_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_adaptive_mt_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_adaptive_mt_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10344,22 +10380,23 @@ async def test_delete_adaptive_mt_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_adaptive_mt_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_adaptive_mt_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_adaptive_mt_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10735,22 +10772,23 @@ async def test_get_adaptive_mt_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_adaptive_mt_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_adaptive_mt_dataset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_adaptive_mt_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11135,22 +11173,23 @@ async def test_list_adaptive_mt_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_adaptive_mt_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_adaptive_mt_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_adaptive_mt_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11729,22 +11768,23 @@ async def test_adaptive_mt_translate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.adaptive_mt_translate - ] = mock_object + ] = mock_rpc request = {} await client.adaptive_mt_translate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.adaptive_mt_translate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12131,22 +12171,23 @@ async def test_get_adaptive_mt_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_adaptive_mt_file - ] = mock_object + ] = mock_rpc request = {} await client.get_adaptive_mt_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_adaptive_mt_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12515,22 +12556,23 @@ async def test_delete_adaptive_mt_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_adaptive_mt_file - ] = mock_object + ] = mock_rpc request = {} await client.delete_adaptive_mt_file(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_adaptive_mt_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12889,22 +12931,23 @@ async def test_import_adaptive_mt_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_adaptive_mt_file - ] = mock_object + ] = mock_rpc request = {} await client.import_adaptive_mt_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_adaptive_mt_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13276,22 +13319,23 @@ async def test_list_adaptive_mt_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_adaptive_mt_files - ] = mock_object + ] = mock_rpc request = {} await client.list_adaptive_mt_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_adaptive_mt_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13867,22 +13911,23 @@ async def test_list_adaptive_mt_sentences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_adaptive_mt_sentences - ] = mock_object + ] = mock_rpc request = {} await client.list_adaptive_mt_sentences(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_adaptive_mt_sentences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14390,8 +14435,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -14445,26 +14491,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14788,8 +14836,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -14843,26 +14892,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15246,22 +15297,23 @@ async def test_list_examples_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_examples - ] = mock_object + ] = mock_rpc request = {} await client.list_examples(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_examples(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15748,8 +15800,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -15803,26 +15856,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16182,22 +16237,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16759,22 +16815,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17081,8 +17138,9 @@ def test_delete_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_model(request) @@ -17136,26 +17194,28 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py index 7db65779971e..7dd272d2e885 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py @@ -1340,22 +1340,23 @@ async def test_translate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.translate_text - ] = mock_object + ] = mock_rpc request = {} await client.translate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.translate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1621,22 +1622,23 @@ async def test_detect_language_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detect_language - ] = mock_object + ] = mock_rpc request = {} await client.detect_language(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detect_language(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2015,22 +2017,23 @@ async def test_get_supported_languages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_supported_languages - ] = mock_object + ] = mock_rpc request = {} await client.get_supported_languages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_supported_languages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2427,22 +2430,23 @@ async def test_translate_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.translate_document - ] = mock_object + ] = mock_rpc request = {} await client.translate_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2672,8 +2676,9 @@ def test_batch_translate_text_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_translate_text(request) @@ -2729,26 +2734,28 @@ async def test_batch_translate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_translate_text - ] = mock_object + ] = mock_rpc request = {} await client.batch_translate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_translate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2978,8 +2985,9 @@ def test_batch_translate_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_translate_document(request) @@ -3035,26 +3043,28 @@ async def test_batch_translate_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_translate_document - ] = mock_object + ] = mock_rpc request = {} await client.batch_translate_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_translate_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3451,8 +3461,9 @@ def test_create_glossary_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_glossary(request) @@ -3506,26 +3517,28 @@ async def test_create_glossary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_glossary - ] = mock_object + ] = mock_rpc request = {} await client.create_glossary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3886,22 +3899,23 @@ async def test_list_glossaries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_glossaries - ] = mock_object + ] = mock_rpc request = {} await client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_glossaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4458,22 +4472,23 @@ async def test_get_glossary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_glossary - ] = mock_object + ] = mock_rpc request = {} await client.get_glossary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4768,8 +4783,9 @@ def test_delete_glossary_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_glossary(request) @@ -4823,26 +4839,28 @@ async def test_delete_glossary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_glossary - ] = mock_object + ] = mock_rpc request = {} await client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_glossary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From ae539fb44b81de96aeb32fa823afb49016ab0932 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 09:35:22 -0400 Subject: [PATCH 003/108] feat: [google-cloud-dialogflow-cx] A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.BatchPredictionJob` (#13005) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.BatchPredictionJob` feat: A new message `RoutingConfig` is added feat: A new field `seed` is added to message `.google.cloud.aiplatform.v1.GenerationConfig` feat: A new field `routing_config` is added to message `.google.cloud.aiplatform.v1.GenerationConfig` feat: A new field `avg_logprobs` is added to message `.google.cloud.aiplatform.v1.Candidate` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.CustomJob` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.CustomJob` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.DataItem` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.DataItem` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.Dataset` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.Dataset` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.DatasetVersion` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.DatasetVersion` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.DeploymentResourcePool` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.DeploymentResourcePool` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.EntityType` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.EntityType` feat: A new message `TimeSeries` is added feat: A new field `time_series` is added to message `.google.cloud.aiplatform.v1.FeatureGroup` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.FeatureOnlineStore` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.FeatureOnlineStore` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.FeatureView` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.FeatureView` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.FeatureViewSync` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.FeatureViewSync` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.Featurestore` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.Featurestore` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.HyperparameterTuningJob` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.HyperparameterTuningJob` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.Index` feat: A 
new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.Index` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.IndexEndpoint` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.IndexEndpoint` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob` feat: A new field `hugging_face_token` is added to message `.google.cloud.aiplatform.v1.GetPublisherModelRequest` feat: A new field `satisfies_pzs` is added to message `.google.cloud.aiplatform.v1.NasJob` feat: A new field `satisfies_pzi` is added to message `.google.cloud.aiplatform.v1.NasJob` feat: A new field `encryption_spec` is added to message `.google.cloud.aiplatform.v1.NotebookExecutionJob` feat: A new field `sample_request` is added to message `.google.cloud.aiplatform.v1.PublisherModel` feat: A new resource_definition `compute.googleapis.com/NetworkAttachment` is added feat: A new field `service_attachment` is added to message `.google.cloud.aiplatform.v1.PrivateServiceConnectConfig` feat: A new field `total_truncated_example_count` is added to message `.google.cloud.aiplatform.v1.SupervisedTuningDataStats` feat: A new field `truncated_example_indices` is added to message `.google.cloud.aiplatform.v1.SupervisedTuningDataStats` docs: A comment for enum value `STOP` in enum `FinishReason` is changed docs: A comment for enum value `MAX_TOKENS` in enum `FinishReason` is changed docs: A comment for enum value `SAFETY` in enum `FinishReason` is changed docs: A comment for enum value `RECITATION` in enum `FinishReason` is changed docs: A comment for enum value `OTHER` in enum `FinishReason` is changed docs: A comment for enum value `BLOCKLIST` in enum `FinishReason` is changed docs: A comment for enum value `PROHIBITED_CONTENT` in enum `FinishReason` is changed docs: A comment for enum value `SPII` in enum `FinishReason` is changed docs: A comment for enum `Strategy` is changed docs: A comment for enum value `STRATEGY_UNSPECIFIED` in enum `Strategy` is changed docs: A comment for field `model` in message `.google.cloud.aiplatform.v1.GenerateContentRequest` is changed docs: A comment for enum value `AUTO` in enum `Mode` is changed END_COMMIT_OVERRIDE PiperOrigin-RevId: 661337164 Source-Link: https://github.com/googleapis/googleapis/commit/a5c87fe0b6f0c3ff00dfab77326b5ff728112046 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e59dd786c12cd1ea13ed5e1b6b06ece8874d5f3b Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpYWxvZ2Zsb3ctY3gvLk93bEJvdC55YW1sIiwiaCI6ImU1OWRkNzg2YzEyY2QxZWExM2VkNWUxYjZiMDZlY2U4ODc0ZDVmM2IifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../cloud/dialogflowcx/gapic_version.py | 2 +- .../cloud/dialogflowcx_v3/gapic_version.py | 2 +- .../services/agents/async_client.py | 5 +- .../dialogflowcx_v3/services/agents/client.py | 2 +- .../services/changelogs/async_client.py | 5 +- .../services/changelogs/client.py | 2 +- .../services/deployments/async_client.py | 5 +- .../services/deployments/client.py | 2 +- .../services/entity_types/async_client.py | 5 +- .../services/entity_types/client.py | 2 +- .../services/environments/async_client.py | 5 +- .../services/environments/client.py | 2 +- .../services/experiments/async_client.py | 5 +- .../services/experiments/client.py | 2 +- .../services/flows/async_client.py | 5 +- 
.../dialogflowcx_v3/services/flows/client.py | 2 +- .../services/generators/async_client.py | 5 +- .../services/generators/client.py | 2 +- .../services/intents/async_client.py | 5 +- .../services/intents/client.py | 2 +- .../services/pages/async_client.py | 5 +- .../dialogflowcx_v3/services/pages/client.py | 2 +- .../security_settings_service/async_client.py | 6 +- .../security_settings_service/client.py | 2 +- .../session_entity_types/async_client.py | 6 +- .../services/session_entity_types/client.py | 2 +- .../services/sessions/async_client.py | 5 +- .../services/sessions/client.py | 2 +- .../services/test_cases/async_client.py | 5 +- .../services/test_cases/client.py | 2 +- .../transition_route_groups/async_client.py | 6 +- .../transition_route_groups/client.py | 2 +- .../services/versions/async_client.py | 5 +- .../services/versions/client.py | 2 +- .../services/webhooks/async_client.py | 5 +- .../services/webhooks/client.py | 2 +- .../dialogflowcx_v3beta1/gapic_version.py | 2 +- .../cloud/dialogflowcx_v3beta1/types/tool.py | 17 +- ...etadata_google.cloud.dialogflow.cx.v3.json | 2 +- ...ta_google.cloud.dialogflow.cx.v3beta1.json | 2 +- .../unit/gapic/dialogflowcx_v3/test_agents.py | 119 ++++++++------ .../gapic/dialogflowcx_v3/test_changelogs.py | 18 ++- .../gapic/dialogflowcx_v3/test_deployments.py | 18 ++- .../dialogflowcx_v3/test_entity_types.py | 83 +++++----- .../dialogflowcx_v3/test_environments.py | 121 ++++++++------ .../gapic/dialogflowcx_v3/test_experiments.py | 63 ++++---- .../unit/gapic/dialogflowcx_v3/test_flows.py | 120 ++++++++------ .../gapic/dialogflowcx_v3/test_generators.py | 45 +++--- .../gapic/dialogflowcx_v3/test_intents.py | 83 +++++----- .../unit/gapic/dialogflowcx_v3/test_pages.py | 45 +++--- .../test_security_settings_service.py | 45 +++--- .../test_session_entity_types.py | 45 +++--- .../gapic/dialogflowcx_v3/test_sessions.py | 54 ++++--- .../gapic/dialogflowcx_v3/test_test_cases.py | 148 ++++++++++-------- .../test_transition_route_groups.py | 45 +++--- .../gapic/dialogflowcx_v3/test_versions.py | 83 +++++----- .../gapic/dialogflowcx_v3/test_webhooks.py | 45 +++--- 57 files changed, 709 insertions(+), 620 deletions(-) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py index ed47e29bfcec..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.35.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py index ed47e29bfcec..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.35.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py index 646e81cef14c..d418c72e27a8 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AgentsClient).get_transport_class, type(AgentsClient) - ) + get_transport_class = AgentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py index 26950a72cab5..ee7755cbc5e4 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py @@ -809,7 +809,7 @@ def __init__( transport_init: Union[ Type[AgentsTransport], Callable[..., AgentsTransport] ] = ( - type(self).get_transport_class(transport) + AgentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AgentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py index 5d6b6cb43f6a..1845856dbedc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ChangelogsClient).get_transport_class, type(ChangelogsClient) - ) + get_transport_class = ChangelogsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py index e7a87a9dc128..9131899f0763 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[ChangelogsTransport], Callable[..., ChangelogsTransport] ] = ( - type(self).get_transport_class(transport) + ChangelogsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ChangelogsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py index 28b3f4565c02..3d8b32468186 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DeploymentsClient).get_transport_class, type(DeploymentsClient) - ) + get_transport_class = DeploymentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py index de3bbf4c104f..377a00aa1f16 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py @@ -743,7 +743,7 @@ def __init__( transport_init: Union[ Type[DeploymentsTransport], Callable[..., DeploymentsTransport] ] = ( - type(self).get_transport_class(transport) + DeploymentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DeploymentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py index 978b78be2307..ad3d87bc8c15 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EntityTypesClient).get_transport_class, type(EntityTypesClient) - ) + get_transport_class = EntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py index f2f260f15254..ee52f34597b1 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py @@ -666,7 +666,7 @@ def __init__( transport_init: Union[ Type[EntityTypesTransport], Callable[..., EntityTypesTransport] ] = ( - type(self).get_transport_class(transport) + EntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py index 20627436e83c..dbeb27d1f3ea 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -212,9 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient) - ) + get_transport_class = EnvironmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py index 9e2f16b22c88..627b1550dd69 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py @@ -818,7 +818,7 @@ def __init__( transport_init: Union[ Type[EnvironmentsTransport], Callable[..., EnvironmentsTransport] ] = ( - type(self).get_transport_class(transport) + EnvironmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EnvironmentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py index 55f9ff9e2aba..2c7b0531132c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ExperimentsClient).get_transport_class, type(ExperimentsClient) - ) + get_transport_class = ExperimentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py index 2bcb0a2834be..ab123d3d2577 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py @@ -694,7 +694,7 @@ def __init__( transport_init: Union[ Type[ExperimentsTransport], Callable[..., ExperimentsTransport] ] = ( - type(self).get_transport_class(transport) + ExperimentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ExperimentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py index e1d493f9c433..d577674add12 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FlowsClient).get_transport_class, type(FlowsClient) - ) + get_transport_class = FlowsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py index 0039ad75bb96..a9d4179addc9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py @@ -793,7 +793,7 @@ def __init__( transport_init: Union[ Type[FlowsTransport], Callable[..., FlowsTransport] ] = ( - type(self).get_transport_class(transport) + FlowsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FlowsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py index fe8a36174bfb..fa5c7d375b52 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GeneratorsClient).get_transport_class, type(GeneratorsClient) - ) + get_transport_class = GeneratorsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py index c736a9c1c3a3..6628598ed330 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py @@ -664,7 +664,7 @@ def __init__( transport_init: Union[ Type[GeneratorsTransport], Callable[..., GeneratorsTransport] ] = ( - type(self).get_transport_class(transport) + GeneratorsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GeneratorsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py index 8a42b95d4097..651f59caa0db 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IntentsClient).get_transport_class, type(IntentsClient) - ) + get_transport_class = IntentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py index 3cd48e97b07d..ed919379403b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py @@ -690,7 +690,7 @@ def __init__( transport_init: Union[ Type[IntentsTransport], Callable[..., IntentsTransport] ] = ( - type(self).get_transport_class(transport) + IntentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IntentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py index 1f4b99ef98f2..55f6df6cae8c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PagesClient).get_transport_class, type(PagesClient) - ) + get_transport_class = PagesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py index 15651b9025fc..c937c1aaf18b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py @@ -787,7 +787,7 @@ def __init__( transport_init: Union[ Type[PagesTransport], Callable[..., PagesTransport] ] = ( - type(self).get_transport_class(transport) + PagesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PagesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py index cf804aa80751..6a07ef8eaee0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,10 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecuritySettingsServiceClient).get_transport_class, - type(SecuritySettingsServiceClient), - ) + get_transport_class = SecuritySettingsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py index cff938935e48..b8798ee4c5f5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py @@ -720,7 +720,7 @@ def __init__( Type[SecuritySettingsServiceTransport], Callable[..., SecuritySettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + SecuritySettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecuritySettingsServiceTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py index 8182cbdc393b..629553f8b32c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionEntityTypesClient).get_transport_class, - type(SessionEntityTypesClient), - ) + get_transport_class = SessionEntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py index 91c34591e546..9eb7e66a9a34 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py @@ -679,7 +679,7 @@ def __init__( Type[SessionEntityTypesTransport], Callable[..., SessionEntityTypesTransport], ] = ( - type(self).get_transport_class(transport) + SessionEntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionEntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py index ac9083dd159f..d7e73e53d89f 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -212,9 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionsClient).get_transport_class, type(SessionsClient) - ) + get_transport_class = SessionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py index d0fcab1d2a96..bd0e647b2776 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py @@ -887,7 +887,7 @@ def __init__( transport_init: Union[ Type[SessionsTransport], Callable[..., SessionsTransport] ] = ( - type(self).get_transport_class(transport) + SessionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py index 3ed4bbfe0d5a..c17d5d93010a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,9 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TestCasesClient).get_transport_class, type(TestCasesClient) - ) + get_transport_class = TestCasesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py index 1ba2e785fa73..890ba1cb1258 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py @@ -888,7 +888,7 @@ def __init__( transport_init: Union[ Type[TestCasesTransport], Callable[..., TestCasesTransport] ] = ( - type(self).get_transport_class(transport) + TestCasesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TestCasesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py index 74c87b263245..32a9f5867ae5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,10 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TransitionRouteGroupsClient).get_transport_class, - type(TransitionRouteGroupsClient), - ) + get_transport_class = TransitionRouteGroupsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py index 595b5a8976f0..7e035c82dabc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py @@ -779,7 +779,7 @@ def __init__( Type[TransitionRouteGroupsTransport], Callable[..., TransitionRouteGroupsTransport], ] = ( - type(self).get_transport_class(transport) + TransitionRouteGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TransitionRouteGroupsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py index 42ec82ce0340..4e55612528a9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VersionsClient).get_transport_class, type(VersionsClient) - ) + get_transport_class = VersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py index bdf2c9bbe88e..4ebdc5b74698 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py @@ -672,7 +672,7 @@ def __init__( transport_init: Union[ Type[VersionsTransport], Callable[..., VersionsTransport] ] = ( - type(self).get_transport_class(transport) + VersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VersionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py index 9755dd5fd338..0098e1a7841e 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WebhooksClient).get_transport_class, type(WebhooksClient) - ) + get_transport_class = WebhooksClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py index 850bd6db63e2..6f05f8fbf4a6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py @@ -689,7 +689,7 @@ def __init__( transport_init: Union[ Type[WebhooksTransport], Callable[..., WebhooksTransport] ] = ( - type(self).get_transport_class(transport) + WebhooksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WebhooksTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py index ed47e29bfcec..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
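The change repeated across the service clients above swaps `functools.partial(type(SomeClient).get_transport_class, type(SomeClient))` for a plain reference to `SomeClient.get_transport_class`. What follows is a minimal sketch of why the two spellings resolve the same transport class, assuming `get_transport_class` is defined on a client metaclass as the old spelling implies; the `Fake*` names are illustrative placeholders, not the generated GAPIC classes.

import functools


class FakeGrpcTransport:
    """Placeholder for a generated transport class."""


class FakeClientMeta(type):
    """Illustrative stand-in for the generated ClientMeta metaclass."""

    _transport_registry = {"grpc": FakeGrpcTransport}

    def get_transport_class(cls, label="grpc"):
        # Resolve a transport class by name from the registry.
        return cls._transport_registry[label]


class FakeClient(metaclass=FakeClientMeta):
    """Illustrative stand-in for a generated client such as AgentsClient."""


# Old spelling: rebind the metaclass method by hand via functools.partial.
old_style = functools.partial(
    type(FakeClient).get_transport_class, type(FakeClient)
)

# New spelling: plain attribute access on the class already yields a bound method.
new_style = FakeClient.get_transport_class

# Both callables resolve the same transport class for the same label.
assert old_style("grpc") is new_style("grpc") is FakeGrpcTransport

The direct attribute reference is simply the idiomatic spelling: Python already binds the metaclass method when it is looked up on the class, so the manual partial is redundant.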
# -__version__ = "1.35.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py index 2adbf8c97e31..65a3df4ad4d9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py @@ -660,17 +660,12 @@ class CACert(proto.Message): certificates. N.B. Make sure the HTTPS server certificates are signed with "subject alt name". For instance a certificate can be self-signed - using the following command: - - :: - - openssl x509 - -req -days 200 -in example.com.csr \ - -signkey example.com.key \ - -out example.com.crt \ - -extfile <(printf - "\nsubjectAltName='DNS:www.example.com'") - + using the following command, openssl x509 + -req -days 200 -in example.com.csr \ + -signkey example.com.key \ + -out example.com.crt \ + -extfile <(printf + "\nsubjectAltName='DNS:www.example.com'") """ display_name: str = proto.Field( diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json index 0f9ba15f5cf6..1ea6a76db3ff 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.35.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json index 7f3672bb92e4..0fdb4114a2f4 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.35.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py index f3a358ac3535..c1fd0dd5c6de 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py @@ -1241,22 +1241,23 @@ async def test_list_agents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_agents - ] = mock_object + ] = mock_rpc request = {} await client.list_agents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1833,22 +1834,23 @@ async def test_get_agent_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_agent - ] = mock_object + ] = mock_rpc request = {} await client.get_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2253,22 +2255,23 @@ async def test_create_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_agent - ] = mock_object + ] = mock_rpc request = {} await client.create_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,22 +2682,23 @@ async def test_update_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_agent - ] = mock_object + ] = mock_rpc request = {} await client.update_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3066,22 +3070,23 @@ async def test_delete_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_agent - ] = mock_object + ] = mock_rpc request = {} await client.delete_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3369,8 +3374,9 @@ def test_export_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_agent(request) @@ -3424,26 +3430,28 @@ async def test_export_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_agent - ] = mock_object + ] = mock_rpc request = {} await client.export_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3653,8 +3661,9 @@ def test_restore_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_agent(request) @@ -3708,26 +3717,28 @@ async def test_restore_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_agent - ] = mock_object + ] = mock_rpc request = {} await client.restore_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3993,22 +4004,23 @@ async def test_validate_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_agent - ] = mock_object + ] = mock_rpc request = {} await client.validate_agent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4290,22 +4302,23 @@ async def test_get_agent_validation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_agent_validation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_agent_validation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_agent_validation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4682,22 +4695,23 @@ async def test_get_generative_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_generative_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_generative_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_generative_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5080,22 +5094,23 @@ async def test_update_generative_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_generative_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_generative_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_generative_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py index 703d4b606ddf..b8bd1d2f1c54 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py @@ -1250,22 +1250,23 @@ async def test_list_changelogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_changelogs - ] = mock_object + ] = mock_rpc request = {} await client.list_changelogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_changelogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1826,22 +1827,23 @@ async def test_get_changelog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_changelog - ] = mock_object + ] = mock_rpc request = {} await client.get_changelog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_changelog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py index aef4e1dbdb91..e2dd4905bc86 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py @@ -1254,22 +1254,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1818,22 +1819,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py index dd06d87835a8..a818583d5c57 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py @@ -1281,22 +1281,23 @@ async def test_get_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_create_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2118,22 +2120,23 @@ async def test_update_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2521,22 +2524,23 @@ async def test_delete_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2900,22 +2904,23 @@ async def test_list_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3434,8 +3439,9 @@ def test_export_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_entity_types(request) @@ -3491,26 +3497,28 @@ async def test_export_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.export_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3738,8 +3746,9 @@ def test_import_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_entity_types(request) @@ -3795,26 +3804,28 @@ async def test_import_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.import_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py index d7aedf0d1b11..d0ca52497e2f 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py @@ -1290,22 +1290,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1872,22 +1873,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2194,8 +2196,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -2251,26 +2254,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2587,8 +2592,9 @@ def test_update_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_environment(request) @@ -2644,26 +2650,28 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3035,22 +3043,23 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3415,22 +3424,23 @@ async def test_lookup_environment_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_environment_history - ] = mock_object + ] = mock_rpc request = {} await client.lookup_environment_history(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_environment_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3948,8 +3958,9 @@ def test_run_continuous_test_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_continuous_test(request) @@ -4005,26 +4016,28 @@ async def test_run_continuous_test_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_continuous_test - ] = mock_object + ] = mock_rpc request = {} await client.run_continuous_test(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_continuous_test(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4309,22 +4322,23 @@ async def test_list_continuous_test_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_continuous_test_results - ] = mock_object + ] = mock_rpc request = {} await client.list_continuous_test_results(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_continuous_test_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4834,8 +4848,9 @@ def test_deploy_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_flow(request) @@ -4889,26 +4904,28 @@ async def test_deploy_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_flow - ] = mock_object + ] = mock_rpc request = {} await client.deploy_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py index bf999cd76ad2..2f046b0f1d34 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py @@ -1257,22 +1257,23 @@ async def test_list_experiments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_experiments - ] = mock_object + ] = mock_rpc request = {} await client.list_experiments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_experiments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1827,22 +1828,23 @@ async def test_get_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_experiment - ] = mock_object + ] = mock_rpc request = {} await client.get_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2221,22 +2223,23 @@ async def test_create_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_experiment - ] = mock_object + ] = mock_rpc request = {} await client.create_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2632,22 +2635,23 @@ async def test_update_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_experiment - ] = mock_object + ] = mock_rpc request = {} await client.update_experiment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3028,22 +3032,23 @@ async def test_delete_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_experiment - ] = mock_object + ] = mock_rpc request = {} await client.delete_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3407,22 +3412,23 @@ async def test_start_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_experiment - ] = mock_object + ] = mock_rpc request = {} await client.start_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3791,22 +3797,23 @@ async def test_stop_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_experiment - ] = mock_object + ] = mock_rpc request = {} await client.stop_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py index 27d07c7468fa..67a434252491 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py @@ -1252,22 +1252,23 @@ async def test_create_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_flow - ] = mock_object + ] = mock_rpc request = {} await client.create_flow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1623,22 +1624,23 @@ async def test_delete_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_flow - ] = mock_object + ] = mock_rpc request = {} await client.delete_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1980,22 +1982,23 @@ async def test_list_flows_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_flows - ] = mock_object + ] = mock_rpc request = {} await client.list_flows(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_flows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2550,22 +2553,23 @@ async def test_get_flow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_flow - ] = mock_object + ] = mock_rpc request = {} await client.get_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2930,22 +2934,23 @@ async def test_update_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_flow - ] = mock_object + ] = mock_rpc request = {} await client.update_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3252,8 +3257,9 @@ def test_train_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_flow(request) @@ -3305,26 +3311,28 @@ async def test_train_flow_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_flow - ] = mock_object + ] = mock_rpc request = {} await client.train_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3672,22 +3680,23 @@ async def test_validate_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_flow - ] = mock_object + ] = mock_rpc request = {} await client.validate_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3969,22 +3978,23 @@ async def test_get_flow_validation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_flow_validation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_flow_validation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_flow_validation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4289,8 +4299,9 @@ def test_import_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_flow(request) @@ -4344,26 +4355,28 @@ async def test_import_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_flow - ] = mock_object + ] = mock_rpc request = {} await client.import_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4573,8 +4586,9 @@ def test_export_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_flow(request) @@ -4628,26 +4642,28 @@ async def test_export_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_flow - ] = mock_object + ] = mock_rpc request = {} await client.export_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py index 2dadaf10ade6..3509dcdf879a 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py @@ -1251,22 +1251,23 @@ async def test_list_generators_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_generators - ] = mock_object + ] = mock_rpc request = {} await client.list_generators(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_generators(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1814,22 +1815,23 @@ async def test_get_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_generator - ] = mock_object + ] = mock_rpc request = {} await client.get_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2183,22 +2185,23 @@ async def test_create_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_generator - ] = mock_object + ] = mock_rpc request = {} await client.create_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2564,22 +2567,23 @@ async def test_update_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_generator - ] = mock_object + ] = mock_rpc request = {} await client.update_generator(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2935,22 +2939,23 @@ async def test_delete_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_generator - ] = mock_object + ] = mock_rpc request = {} await client.delete_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py index 7aafde996419..1f38085b4554 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py @@ -1235,22 +1235,23 @@ async def test_list_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_intents - ] = mock_object + ] = mock_rpc request = {} await client.list_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1805,22 +1806,23 @@ async def test_get_intent_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_intent - ] = mock_object + ] = mock_rpc request = {} await client.get_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2187,22 +2189,23 @@ async def test_create_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_intent - ] = mock_object + ] = mock_rpc request = {} await client.create_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2577,22 +2580,23 @@ async def test_update_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_intent - ] = mock_object + ] = mock_rpc request = {} await client.update_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2948,22 +2952,23 @@ async def test_delete_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_intent - ] = mock_object + ] = mock_rpc request = {} await client.delete_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3249,8 +3254,9 @@ def test_import_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_intents(request) @@ -3304,26 +3310,28 @@ async def test_import_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_intents - ] = mock_object + ] = mock_rpc request = {} await client.import_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3533,8 +3541,9 @@ def test_export_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_intents(request) @@ -3588,26 +3597,28 @@ async def test_export_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_intents - ] = mock_object + ] = mock_rpc request = {} await client.export_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py index 527a5c488360..7a13dfa2ef01 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py @@ -1227,22 +1227,23 @@ async def test_list_pages_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_pages - ] = mock_object + ] = mock_rpc request = {} await client.list_pages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_pages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1794,22 +1795,23 @@ async def test_get_page_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_page - ] = mock_object + ] = mock_rpc request = {} await client.get_page(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2171,22 +2173,23 @@ async def test_create_page_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_page - ] = mock_object + ] = mock_rpc request = {} await client.create_page(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2556,22 +2559,23 @@ async def test_update_page_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_page - ] = mock_object + ] = mock_rpc request = {} await client.update_page(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2925,22 +2929,23 @@ async def test_delete_page_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_page - ] = mock_object + ] = mock_rpc request = {} await client.delete_page(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py index 773d17c206a5..019fcfd61dd3 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py @@ -1406,22 +1406,23 @@ async def test_create_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.create_security_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1865,22 +1866,23 @@ async def test_get_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_security_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2302,22 +2304,23 @@ async def test_update_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_security_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2732,22 +2735,23 @@ async def test_list_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.list_security_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3315,22 +3319,23 @@ async def test_delete_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.delete_security_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py index 34c921c7a716..6424ead20475 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py @@ -1348,22 +1348,23 @@ async def test_list_session_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_session_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_session_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_session_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1950,22 +1951,23 @@ async def test_get_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2349,22 +2351,23 @@ async def test_create_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_session_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2762,22 +2765,23 @@ async def test_update_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3166,22 +3170,23 @@ async def test_delete_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py index 7df3ded84cbe..6f546a7eb231 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py @@ -1249,22 +1249,23 @@ async def test_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detect_intent - ] = mock_object + ] = mock_rpc request = {} await client.detect_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1547,22 +1548,23 @@ async def test_server_streaming_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.server_streaming_detect_intent - ] = mock_object + ] = mock_rpc request = {} await client.server_streaming_detect_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.server_streaming_detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1770,22 +1772,23 @@ async def test_streaming_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_detect_intent - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_detect_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1989,22 +1992,23 @@ async def test_match_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.match_intent - ] = mock_object + ] = mock_rpc request = {} await client.match_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.match_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2267,22 +2271,23 @@ async def test_fulfill_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fulfill_intent - ] = mock_object + ] = mock_rpc request = {} await client.fulfill_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fulfill_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2569,22 +2574,23 @@ async def test_submit_answer_feedback_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_answer_feedback - ] = mock_object + ] = mock_rpc request = {} await client.submit_answer_feedback(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.submit_answer_feedback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py index 152e4633278a..998b46cc45d3 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py @@ -1272,22 +1272,23 @@ async def test_list_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.list_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1836,22 +1837,23 @@ async def test_batch_delete_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2210,22 +2212,23 @@ async def test_get_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_test_case - ] = mock_object + ] = mock_rpc request = {} await client.get_test_case(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2587,22 +2590,23 @@ async def test_create_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_test_case - ] = mock_object + ] = mock_rpc request = {} await client.create_test_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2974,22 +2978,23 @@ async def test_update_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_test_case - ] = mock_object + ] = mock_rpc request = {} await client.update_test_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3300,8 +3305,9 @@ def test_run_test_case_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_test_case(request) @@ -3355,26 +3361,28 @@ async def test_run_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_test_case - ] = mock_object + ] = mock_rpc request = {} await client.run_test_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3594,8 +3602,9 @@ def test_batch_run_test_cases_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_run_test_cases(request) @@ -3651,26 +3660,28 @@ async def test_batch_run_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_run_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.batch_run_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_run_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3952,22 +3963,23 @@ async def test_calculate_coverage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.calculate_coverage - ] = mock_object + ] = mock_rpc request = {} await client.calculate_coverage(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.calculate_coverage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4194,8 +4206,9 @@ def test_import_test_cases_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_test_cases(request) @@ -4251,26 +4264,28 @@ async def test_import_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.import_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4496,8 +4511,9 @@ def test_export_test_cases_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_test_cases(request) @@ -4553,26 +4569,28 @@ async def test_export_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.export_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4859,22 +4877,23 @@ async def test_list_test_case_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_test_case_results - ] = mock_object + ] = mock_rpc request = {} await client.list_test_case_results(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_test_case_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5453,22 +5472,23 @@ async def test_get_test_case_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_test_case_result - ] = mock_object + ] = mock_rpc request = {} await client.get_test_case_result(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_test_case_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py index 5359a4941bec..d73c6d87dc90 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py @@ -1386,22 +1386,23 @@ async def test_list_transition_route_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transition_route_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_transition_route_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transition_route_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1988,22 +1989,23 @@ async def test_get_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.get_transition_route_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2387,22 +2389,23 @@ async def test_create_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.create_transition_route_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2802,22 +2805,23 @@ async def test_update_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.update_transition_route_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3203,22 +3207,23 @@ async def test_delete_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_transition_route_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py index d07edcdaa702..9e1048175ba9 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py @@ -1247,22 +1247,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1814,22 +1815,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2124,8 +2126,9 @@ def test_create_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_version(request) @@ -2179,26 +2182,28 @@ async def test_create_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_version - ] = mock_object + ] = mock_rpc request = {} await client.create_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2559,22 +2564,23 @@ async def test_update_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_version - ] = mock_object + ] = mock_rpc request = {} await client.update_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2932,22 +2938,23 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3231,8 +3238,9 @@ def test_load_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.load_version(request) @@ -3286,26 +3294,28 @@ async def test_load_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.load_version - ] = mock_object + ] = mock_rpc request = {} await client.load_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.load_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3660,22 +3670,23 @@ async def test_compare_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compare_versions - ] = mock_object + ] = mock_rpc request = {} await client.compare_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compare_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py index ec37201b7396..df057720c7c9 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py @@ -1235,22 +1235,23 @@ async def test_list_webhooks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_webhooks - ] = mock_object + ] = mock_rpc request = {} await client.list_webhooks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_webhooks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1799,22 +1800,23 @@ async def test_get_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_webhook - ] = mock_object + ] = mock_rpc request = {} await client.get_webhook(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2169,22 +2171,23 @@ async def test_create_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_webhook - ] = mock_object + ] = mock_rpc request = {} await client.create_webhook(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2549,22 +2552,23 @@ async def test_update_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_webhook - ] = mock_object + ] = mock_rpc request = {} await client.update_webhook(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2920,22 +2924,23 @@ async def test_delete_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_webhook - ] = mock_object + ] = mock_rpc request = {} await client.delete_webhook(request) # Establish that the underlying gRPC stub method was called. 
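The hunks above all make the same change: the cached wrapped RPC is now named mock_rpc and given an explicit return_value, and each test asserts that the stub is wrapped once and then reused from the transport's _wrapped_methods cache. A minimal, self-contained sketch of that caching behaviour using only unittest.mock; FakeTransport here is a hypothetical stand-in for the generated GAPIC transport, not the real class:

from unittest import mock


class FakeTransport:
    """Hypothetical stand-in for a generated GAPIC transport."""

    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._wrapped_methods = {}

    def call(self, name, rpc, request):
        # Build and cache the wrapped RPC only on first use.
        if name not in self._wrapped_methods:
            self._wrapped_methods[name] = self._wrapper_fn(rpc)
        return self._wrapped_methods[name](request)


def test_use_cached_wrapped_rpc_sketch():
    wrapper_fn = mock.Mock(side_effect=lambda rpc: rpc)
    mock_rpc = mock.Mock(return_value=mock.Mock())
    transport = FakeTransport(wrapper_fn)

    transport.call("delete_webhook", mock_rpc, {})
    assert mock_rpc.call_count == 1

    transport.call("delete_webhook", mock_rpc, {})
    # The second call reuses the cached wrapper and hits the same stub again.
    assert wrapper_fn.call_count == 1
    assert mock_rpc.call_count == 2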
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 92f28c241d2428df9a0b08da693788bdd1fe3b8c Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Mon, 19 Aug 2024 06:37:59 -0700 Subject: [PATCH 004/108] chore(docs): add doc formatting post-processing fixes (#13014) This attempts to configure post-processing for the repeated docs failures we're seeing. This is a short-term workaround, listing the individual cases with violations. We envision eventually implementing a proper fix, probably involving stricter upstream standards, but that will take a while, and these failures are drudge work at the moment. Note that I was only able to apply the first of these changes in my local client to the cloned PRs that were generating these errors; I'm not sure what's wrong with my incantation. Feedback welcome! (At the very least, this is a listing of the specific docs fixes we need to implement.) --------- Co-authored-by: Anthonios Partheniou --- .../doc-formatting.yaml | 1 + .../doc-formatting.yaml | 1 + .../doc-formatting.yaml | 1 + .../doc-formatting.yaml | 1 + .../doc-formatting.yaml | 1 + .../doc-formatting.yaml | 1 + .../doc-formatting.yaml | 1 + .../doc-formatting.yaml | 132 ++++++++++++++++++ 8 files changed, 139 insertions(+) create mode 120000 packages/google-ai-generativelanguage/scripts/client-post-processing/doc-formatting.yaml create mode 120000 packages/google-cloud-datacatalog/scripts/client-post-processing/doc-formatting.yaml create mode 120000 packages/google-cloud-edgenetwork/scripts/client-post-processing/doc-formatting.yaml create mode 120000 packages/google-cloud-network-connectivity/scripts/client-post-processing/doc-formatting.yaml create mode 120000 packages/google-cloud-retail/scripts/client-post-processing/doc-formatting.yaml create mode 120000 packages/google-cloud-securitycentermanagement/scripts/client-post-processing/doc-formatting.yaml create mode 120000 packages/google-cloud-visionai/scripts/client-post-processing/doc-formatting.yaml diff --git a/packages/google-ai-generativelanguage/scripts/client-post-processing/doc-formatting.yaml b/packages/google-ai-generativelanguage/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-ai-generativelanguage/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-datacatalog/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-datacatalog/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-datacatalog/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-edgenetwork/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-edgenetwork/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-edgenetwork/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ 
+../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-network-connectivity/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-network-connectivity/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-network-connectivity/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-retail/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-retail/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-retail/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-securitycentermanagement/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-securitycentermanagement/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-securitycentermanagement/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-visionai/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-visionai/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-visionai/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index 94fb923ef181..01c368217277 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ b/scripts/client-post-processing/doc-formatting.yaml @@ -35,3 +35,135 @@ replacements: before: \"NS_\" after: "`NS_`" count: 2 + - paths: [ + packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py, + ] + before: \"corpora/\*/documents/\" + after: "`corpora/*/documents/`" + count: 1 + - paths: [ + packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py, + ] + before: \"corpora/\*/documents/\*/chunks/\" + after: "`corpora/*/documents/*/chunks/`" + count: 1 + - paths: [ + packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py, + packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py + ] + before: | + \ role's permitted operations: + \ + \ - reader can use the resource \(e.g. 
+ \ tuned model\) for inference + \ - writer has reader's permissions and + \ additionally can edit and share + \ - owner has writer's permissions and + \ additionally can delete + after: " role's permitted operations:\n\n - reader can use the resource (e.g.\n tuned model) for inference\n - writer has reader's permissions and\n additionally can edit and share\n - owner has writer's permissions and\n additionally can delete\n" + count: 6 + - paths: [ + packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/types/permission.py, + ] + before: | + \ previous role's permitted operations: + \ + \ - reader can use the resource \(e.g. tuned model\) for inference + \ - writer has reader's permissions and additionally can edit and + \ share + \ - owner has writer's permissions and additionally can delete + after: " previous role's permitted operations:\n\n - reader can use the resource (e.g. tuned model) for inference\n - writer has reader's permissions and additionally can edit and\n share\n - owner has writer's permissions and additionally can delete\n" + count: 1 + - paths: [ + packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py, + ] + before: | + \ references are not yet counted in usage computation + \ https://cloud.google.com/bigquery/docs/querying-wildcard-tables + after: " references are not yet counted in usage computation\n https://cloud.google.com/bigquery/docs/querying-wildcard-tables\n" + count: 1 + - paths: [ + packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py, + ] + before: | + \ 14 years, 51 weeks, 6 days, 23 hours, 59 + after: " 14 years, 51 weeks, 6 days, 23 hours, 59\n" + count: 1 + - paths: [ + packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/types/policy_based_routing.py, + ] + before: | + \ 1000. The priority value must be from 1 to + \ 65535, inclusive. + after: " 1000. The priority value must be from 1 to\n 65535, inclusive.\n" + count: 1 + - paths: [ + packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project.py, + ] + before: | + \ resource. Format: projects/\*/loggingConfig + after: " resource. Format: `projects/*/loggingConfig`\n" + count: 1 + - paths: [ + packages/google-cloud-retail/google/cloud/retail_v2alpha/types/project.py, + ] + before: | + \ resource. Format: projects/\*/alertConfig + after: " resource. Format: `projects/*/alertConfig`\n" + count: 1 + - paths: [ + packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py, + ] + before: | + \ 1. the state could have changed \(e.g. IAM + \ permission lost\) or + \ 2. A failure occurred during creation of the + \ module. Defaults to false. + after: " - The state could have changed (e.g. IAM permission lost) or\n - A failure occurred during creation of the module. Defaults to false.\n" + count: 1 + - paths: [ + packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py, + ] + before: | + \ module could still fail because 1. the state + \ could have changed \(e.g. IAM permission lost\) or + \ 2. A failure occurred while trying to update the + \ module. + after: " module could still fail because\n\n - The state could have changed (e.g. 
IAM permission lost) or\n - A failure occurred while trying to update the module.\n" + count: 2 + - paths: [ + packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py, + ] + before: | + \ module could still fail because 1. the state + \ could have changed \(e.g. IAM permission lost\) or + \ 2. A failure occurred while trying to delete the + \ module. + after: " module could still fail because\n\n - The state could have changed (e.g. IAM permission lost) or\n - A failure occurred while trying to delete the module.\n" + count: 2 + - paths: [ + packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py, + ] + before: | + \ module could still fail because 1. the state + \ could have changed \(e.g. IAM permission lost\) or + \ 2. A failure occurred during creation of the + \ module. + after: " module could still fail because\n\n - The state could have changed (e.g. IAM permission lost) or\n - A failure occurred during creation of the module.\n" + count: 1 + - paths: [ + packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py, + packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py, + ] + before: | + \ 'ingestionTime': DOUBLE; \(UNIX timestamp\) + \ 'application': STRING; + after: " 'ingestionTime': DOUBLE; (UNIX timestamp)\n 'application': STRING;\n" + count: 1 + - paths: [ + packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py, + ] + before: "'processor': STRING;\n }\n dynamic_config_input_topic " + after: "'processor': STRING;\n\n }\n\n dynamic_config_input_topic " + count: 1 + From 3fc401fb36cb8436e18ae28b23f401f196e3847a Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Mon, 19 Aug 2024 06:43:08 -0700 Subject: [PATCH 005/108] chore: Update the root changelog (#13008) Update the root changelog --- CHANGELOG.md | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 83fb8b34b0f7..2b02f7d31c89 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,8 +4,8 @@ Changelogs ----- - [google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) - [google-ai-generativelanguage==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) -- [google-analytics-admin==0.22.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) +- [google-analytics-data==0.18.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) - [google-apps-chat==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) - [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) @@ -20,6 +20,7 @@ Changelogs - 
[google-cloud-api-keys==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) - [google-cloud-apigee-connect==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) - [google-cloud-apigee-registry==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) +- [google-cloud-apihub==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) - [google-cloud-appengine-admin==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) - [google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) - [google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) @@ -29,7 +30,7 @@ Changelogs - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.23](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.24](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -72,14 +73,14 @@ Changelogs - [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) - [google-cloud-dataproc==5.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) - [google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) -- [google-cloud-deploy==2.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- [google-cloud-deploy==2.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) - [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) - [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.30.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- 
[google-cloud-discoveryengine==0.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.20.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-dialogflow==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-discoveryengine==0.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-dlp==3.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - [google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.30.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-documentai==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) - [google-cloud-edgecontainer==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) - [google-cloud-edgenetwork==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) @@ -88,7 +89,7 @@ Changelogs - [google-cloud-eventarc-publishing==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) - [google-cloud-eventarc==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) - [google-cloud-filestore==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) -- [google-cloud-functions==1.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) +- [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) - [google-cloud-gdchardwaremanagement==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) - [google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) - [google-cloud-gke-connect-gateway==0.8.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) @@ -113,7 +114,7 @@ Changelogs - [google-cloud-monitoring==2.22.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) - [google-cloud-netapp==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) - [google-cloud-network-connectivity==2.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) -- [google-cloud-network-management==1.17.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) +- 
[google-cloud-network-management==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) - [google-cloud-network-security==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) - [google-cloud-network-services==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) - [google-cloud-notebooks==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) @@ -143,7 +144,7 @@ Changelogs - [google-cloud-scheduler==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) - [google-cloud-secret-manager==2.20.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) - [google-cloud-securesourcemanager==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) -- [google-cloud-securitycenter==1.33.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) +- [google-cloud-securitycenter==1.34.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) - [google-cloud-securitycentermanagement==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) - [google-cloud-service-control==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) - [google-cloud-service-directory==1.11.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) @@ -163,7 +164,7 @@ Changelogs - [google-cloud-texttospeech==2.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) - [google-cloud-tpu==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) - [google-cloud-trace==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) -- [google-cloud-translate==3.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) +- [google-cloud-translate==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) - [google-cloud-video-live-stream==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) - [google-cloud-video-stitcher==0.7.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) - [google-cloud-video-transcoder==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) From 5dc35c8b35091a0ed7f69a0f4f4652a48523efaa Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 09:44:49 -0400 Subject: [PATCH 006/108] feat: [google-cloud-dlp] inspect template modified cadence discovery config for Cloud SQL (#13016) - [ ] Regenerate this pull request now. 
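For context on the doc-formatting.yaml rules added earlier in this series (#13014): each replacement entry lists target paths, a before pattern, an after string, and an expected count. A hedged sketch of how such rules could be applied; this is not the repository's actual post-processing script (which is not part of this patch), and treating before as a regular expression plus the use of PyYAML are assumptions:

import pathlib
import re

import yaml  # assumes PyYAML is available


def apply_rules(config_path):
    config = yaml.safe_load(pathlib.Path(config_path).read_text())
    for rule in config["replacements"]:
        pattern = re.compile(rule["before"])
        for path in rule["paths"]:
            target = pathlib.Path(path)
            new_text, n = pattern.subn(rule["after"], target.read_text(), rule["count"])
            if n != rule["count"]:
                # Surface drift between the rules and the generated sources.
                raise ValueError(f"expected {rule['count']} replacements in {path}, got {n}")
            target.write_text(new_text)


# apply_rules("scripts/client-post-processing/doc-formatting.yaml")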
BEGIN_COMMIT_OVERRIDE feat: inspect template modified cadence discovery config for Cloud SQL feat: file store data profiles can now be filtered by type and storage location docs: small improvements END_COMMIT_OVERRIDE PiperOrigin-RevId: 663887424 Source-Link: https://github.com/googleapis/googleapis/commit/07ee67599cf8b34d2fb0bf6a0823dbc1aa875568 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6f7f8ebcf6e2d865ae92b576e5ad00c0007b7e9e Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRscC8uT3dsQm90LnlhbWwiLCJoIjoiNmY3ZjhlYmNmNmUyZDg2NWFlOTJiNTc2ZTVhZDAwYzAwMDdiN2U5ZSJ9 --------- Co-authored-by: Owl Bot --- .../google/cloud/dlp/gapic_version.py | 2 +- .../google/cloud/dlp_v2/gapic_version.py | 2 +- .../services/dlp_service/async_client.py | 5 +- .../dlp_v2/services/dlp_service/client.py | 2 +- .../google/cloud/dlp_v2/types/dlp.py | 24 +- ...nippet_metadata_google.privacy.dlp.v2.json | 2 +- .../unit/gapic/dlp_v2/test_dlp_service.py | 495 ++++++++++-------- 7 files changed, 302 insertions(+), 230 deletions(-) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py index c843b7364c5d..558c8aab67c5 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py index c843b7364c5d..558c8aab67c5 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py index d960bcd23951..af7fdb0a0cee 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -238,9 +237,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DlpServiceClient).get_transport_class, type(DlpServiceClient) - ) + get_transport_class = DlpServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py index 14536dbb7f57..198f49f1e8e8 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py @@ -917,7 +917,7 @@ def __init__( transport_init: Union[ Type[DlpServiceTransport], Callable[..., DlpServiceTransport] ] = ( - type(self).get_transport_class(transport) + DlpServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DlpServiceTransport], transport) ) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py index fe45d8719a30..a92c0cfd6de3 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py @@ -9114,6 +9114,11 @@ class DiscoveryCloudSqlGenerationCadence(proto.Message): this field, profiles are refreshed at this frequency regardless of whether the underlying tables have changed. Defaults to never. + inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Governs when to update data profiles when the inspection + rules defined by the ``InspectTemplate`` change. If not set, + changing the template will not cause a data profile to + update. """ class SchemaModifiedCadence(proto.Message): @@ -9167,6 +9172,13 @@ class CloudSqlSchemaModification(proto.Enum): number=2, enum="DataProfileUpdateFrequency", ) + inspect_template_modified_cadence: "DiscoveryInspectTemplateModifiedCadence" = ( + proto.Field( + proto.MESSAGE, + number=3, + message="DiscoveryInspectTemplateModifiedCadence", + ) + ) class SecretsDiscoveryTarget(proto.Message): @@ -12163,10 +12175,14 @@ class FileClusterSummary(proto.Message): cluster. file_extensions_scanned (MutableSequence[google.cloud.dlp_v2.types.FileExtensionInfo]): A sample of file types scanned in this - cluster. Empty if no files were scanned. + cluster. Empty if no files were scanned. File + extensions can be derived from the file name or + the file content. file_extensions_seen (MutableSequence[google.cloud.dlp_v2.types.FileExtensionInfo]): A sample of file types seen in this cluster. - Empty if no files were seen. + Empty if no files were seen. File extensions can + be derived from the file name or the file + content. no_files_exist (bool): True if no files exist in this cluster. If the bucket had more files than could be listed, this will be false even if @@ -12307,6 +12323,10 @@ class ListFileStoreDataProfilesRequest(proto.Message): - ``project_id`` - The Google Cloud project ID. - ``file_store_path`` - The path like "gs://bucket". + - ``data_source_type`` - The profile's data source type, + like "google/storage/bucket". + - ``data_storage_location`` - The location where the + file store's data is stored, like "us-central1". 
- ``sensitivity_level`` - HIGH|MODERATE|LOW - ``data_risk_level`` - HIGH|MODERATE|LOW - ``resource_visibility``: PUBLIC|RESTRICTED diff --git a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json index 50ffd7e86b2d..4da85d5c6cd9 100644 --- a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dlp", - "version": "3.21.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py index 68e6f837da6d..12f4b921c75b 100644 --- a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -1249,22 +1249,23 @@ async def test_inspect_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.inspect_content - ] = mock_object + ] = mock_rpc request = {} await client.inspect_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.inspect_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1533,22 +1534,23 @@ async def test_redact_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.redact_image - ] = mock_object + ] = mock_rpc request = {} await client.redact_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.redact_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1830,22 +1832,23 @@ async def test_deidentify_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deidentify_content - ] = mock_object + ] = mock_rpc request = {} await client.deidentify_content(request) # Establish that the underlying gRPC stub method was called. 
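The dlp.py hunk above adds inspect_template_modified_cadence alongside the existing refresh_frequency on DiscoveryCloudSqlGenerationCadence. A hedged construction sketch; the surrounding discovery-config plumbing is omitted, UPDATE_FREQUENCY_MONTHLY is assumed to be one of the DataProfileUpdateFrequency values, and the sub-message's own fields (not shown in this diff) are left at their defaults:

from google.cloud import dlp_v2

cadence = dlp_v2.DiscoveryCloudSqlGenerationCadence(
    refresh_frequency=dlp_v2.DataProfileUpdateFrequency.UPDATE_FREQUENCY_MONTHLY,
    # Populate the new cadence sub-message; its fields are not part of this diff.
    inspect_template_modified_cadence=dlp_v2.DiscoveryInspectTemplateModifiedCadence(),
)
print(cadence)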
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.deidentify_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2128,22 +2131,23 @@ async def test_reidentify_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reidentify_content - ] = mock_object + ] = mock_rpc request = {} await client.reidentify_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reidentify_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2414,22 +2418,23 @@ async def test_list_info_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_info_types - ] = mock_object + ] = mock_rpc request = {} await client.list_info_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_info_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2737,22 +2742,23 @@ async def test_create_inspect_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_inspect_template - ] = mock_object + ] = mock_rpc request = {} await client.create_inspect_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_inspect_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3140,22 +3146,23 @@ async def test_update_inspect_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_inspect_template - ] = mock_object + ] = mock_rpc request = {} await client.update_inspect_template(request) # Establish that the underlying gRPC stub method was called. 
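The ListFileStoreDataProfilesRequest documentation above now lists data_source_type and data_storage_location as filterable fields. A hedged usage sketch; the parent value is a placeholder and the filter grammar beyond the documented field names is assumed:

from google.cloud import dlp_v2

request = dlp_v2.ListFileStoreDataProfilesRequest(
    parent="organizations/123/locations/us-central1",  # placeholder
    filter='data_source_type = "google/storage/bucket"',
)
# client = dlp_v2.DlpServiceClient()
# for profile in client.list_file_store_data_profiles(request=request):
#     print(profile.name)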
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_inspect_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,22 +3559,23 @@ async def test_get_inspect_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_inspect_template - ] = mock_object + ] = mock_rpc request = {} await client.get_inspect_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_inspect_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3945,22 +3953,23 @@ async def test_list_inspect_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_inspect_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_inspect_templates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_inspect_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4527,22 +4536,23 @@ async def test_delete_inspect_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_inspect_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_inspect_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_inspect_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4915,22 +4925,23 @@ async def test_create_deidentify_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_deidentify_template - ] = mock_object + ] = mock_rpc request = {} await client.create_deidentify_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_deidentify_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5322,22 +5333,23 @@ async def test_update_deidentify_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_deidentify_template - ] = mock_object + ] = mock_rpc request = {} await client.update_deidentify_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_deidentify_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5739,22 +5751,23 @@ async def test_get_deidentify_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deidentify_template - ] = mock_object + ] = mock_rpc request = {} await client.get_deidentify_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deidentify_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6136,22 +6149,23 @@ async def test_list_deidentify_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deidentify_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_deidentify_templates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deidentify_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6720,22 +6734,23 @@ async def test_delete_deidentify_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_deidentify_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_deidentify_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_deidentify_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7110,22 +7125,23 @@ async def test_create_job_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job_trigger - ] = mock_object + ] = mock_rpc request = {} await client.create_job_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7517,22 +7533,23 @@ async def test_update_job_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job_trigger - ] = mock_object + ] = mock_rpc request = {} await client.update_job_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7921,22 +7938,23 @@ async def test_hybrid_inspect_job_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.hybrid_inspect_job_trigger - ] = mock_object + ] = mock_rpc request = {} await client.hybrid_inspect_job_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.hybrid_inspect_job_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8301,22 +8319,23 @@ async def test_get_job_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job_trigger - ] = mock_object + ] = mock_rpc request = {} await client.get_job_trigger(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8685,22 +8704,23 @@ async def test_list_job_triggers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_job_triggers - ] = mock_object + ] = mock_rpc request = {} await client.list_job_triggers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_job_triggers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9266,22 +9286,23 @@ async def test_delete_job_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job_trigger - ] = mock_object + ] = mock_rpc request = {} await client.delete_job_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9652,22 +9673,23 @@ async def test_activate_job_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.activate_job_trigger - ] = mock_object + ] = mock_rpc request = {} await client.activate_job_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.activate_job_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9968,22 +9990,23 @@ async def test_create_discovery_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_discovery_config - ] = mock_object + ] = mock_rpc request = {} await client.create_discovery_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_discovery_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10376,22 +10399,23 @@ async def test_update_discovery_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_discovery_config - ] = mock_object + ] = mock_rpc request = {} await client.update_discovery_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_discovery_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10793,22 +10817,23 @@ async def test_get_discovery_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_discovery_config - ] = mock_object + ] = mock_rpc request = {} await client.get_discovery_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_discovery_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11186,22 +11211,23 @@ async def test_list_discovery_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_discovery_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_discovery_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_discovery_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11768,22 +11794,23 @@ async def test_delete_discovery_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_discovery_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_discovery_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_discovery_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12146,22 +12173,23 @@ async def test_create_dlp_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dlp_job - ] = mock_object + ] = mock_rpc request = {} await client.create_dlp_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dlp_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12590,22 +12618,23 @@ async def test_list_dlp_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_dlp_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_dlp_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_dlp_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13157,22 +13186,23 @@ async def test_get_dlp_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dlp_job - ] = mock_object + ] = mock_rpc request = {} await client.get_dlp_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dlp_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13516,22 +13546,23 @@ async def test_delete_dlp_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dlp_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_dlp_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_dlp_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13864,22 +13895,23 @@ async def test_cancel_dlp_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_dlp_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_dlp_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_dlp_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14156,22 +14188,23 @@ async def test_create_stored_info_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_stored_info_type - ] = mock_object + ] = mock_rpc request = {} await client.create_stored_info_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_stored_info_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14549,22 +14582,23 @@ async def test_update_stored_info_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_stored_info_type - ] = mock_object + ] = mock_rpc request = {} await client.update_stored_info_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_stored_info_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14951,22 +14985,23 @@ async def test_get_stored_info_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stored_info_type - ] = mock_object + ] = mock_rpc request = {} await client.get_stored_info_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stored_info_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15340,22 +15375,23 @@ async def test_list_stored_info_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_stored_info_types - ] = mock_object + ] = mock_rpc request = {} await client.list_stored_info_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_stored_info_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15922,22 +15958,23 @@ async def test_delete_stored_info_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_stored_info_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_stored_info_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_stored_info_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16306,22 +16343,23 @@ async def test_list_project_data_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_project_data_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_project_data_profiles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_project_data_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16903,22 +16941,23 @@ async def test_list_table_data_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_table_data_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_table_data_profiles(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_table_data_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17500,22 +17539,23 @@ async def test_list_column_data_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_column_data_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_column_data_profiles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_column_data_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18100,22 +18140,23 @@ async def test_get_project_data_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project_data_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_project_data_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project_data_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18499,22 +18540,23 @@ async def test_list_file_store_data_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_file_store_data_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_file_store_data_profiles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_file_store_data_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19123,22 +19165,23 @@ async def test_get_file_store_data_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_file_store_data_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_file_store_data_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_file_store_data_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19526,22 +19569,23 @@ async def test_delete_file_store_data_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_file_store_data_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_file_store_data_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_file_store_data_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19946,22 +19990,23 @@ async def test_get_table_data_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table_data_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_table_data_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table_data_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20410,22 +20455,23 @@ async def test_get_column_data_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_column_data_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_column_data_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_column_data_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20825,22 +20871,23 @@ async def test_delete_table_data_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_table_data_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_table_data_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_table_data_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21198,22 +21245,23 @@ async def test_hybrid_inspect_dlp_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.hybrid_inspect_dlp_job - ] = mock_object + ] = mock_rpc request = {} await client.hybrid_inspect_dlp_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.hybrid_inspect_dlp_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21562,22 +21610,23 @@ async def test_finish_dlp_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.finish_dlp_job - ] = mock_object + ] = mock_rpc request = {} await client.finish_dlp_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.finish_dlp_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21850,22 +21899,23 @@ async def test_create_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22235,22 +22285,23 @@ async def test_get_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22603,22 +22654,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23177,22 +23229,23 @@ async def test_search_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_connections - ] = mock_object + ] = mock_rpc request = {} await client.search_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23756,22 +23809,23 @@ async def test_delete_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24134,22 +24188,23 @@ async def test_update_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_connection(request) # Establish that the underlying gRPC stub method was called. 
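# The hunks above all apply one mechanical change to the
# *_async_use_cached_wrapped_rpc tests: the cached wrapper is swapped for a
# mock.AsyncMock whose return_value is explicitly pinned to a plain
# mock.Mock(), and the renamed mock_rpc is asserted to be reused on the
# second call.  A minimal, self-contained sketch of that pattern follows
# (standard library only; the "wrapped_methods" dict and the RPC name are
# illustrative stand-ins, not the generated transport API):
import asyncio
from unittest import mock


async def _cached_wrapped_rpc_demo():
    # Stand-in for client._client._transport._wrapped_methods.
    wrapped_methods = {}

    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaited calls resolve to a plain Mock
    wrapped_methods["inspect_content"] = mock_rpc

    await wrapped_methods["inspect_content"]({})
    assert mock_rpc.call_count == 1

    # The second call goes through the same cached wrapper object.
    await wrapped_methods["inspect_content"]({})
    assert mock_rpc.call_count == 2


asyncio.run(_cached_wrapped_rpc_demo())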
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 9c54c1d92e54f71f35d8e7a65bb16f730ec841b0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 09:45:59 -0400 Subject: [PATCH 007/108] feat: [google-cloud-storage-transfer] add HDFS configuration (#13011) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: add HDFS configuration feat: add GCS Managed Folders feat: add S3 Managed Private Network feat: add S3 Cloudfront Domain END_COMMIT_OVERRIDE PiperOrigin-RevId: 662684810 Source-Link: https://github.com/googleapis/googleapis/commit/83e51983650f38ed33bcc222bab6b5303d72da94 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d8e78bb5c45ecdad16b2647cfdb09195dd55e976 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXN0b3JhZ2UtdHJhbnNmZXIvLk93bEJvdC55YW1sIiwiaCI6ImQ4ZTc4YmI1YzQ1ZWNkYWQxNmIyNjQ3Y2ZkYjA5MTk1ZGQ1NWU5NzYifQ== --------- Co-authored-by: Owl Bot --- .../google/cloud/storage_transfer/__init__.py | 2 + .../cloud/storage_transfer/gapic_version.py | 2 +- .../cloud/storage_transfer_v1/__init__.py | 2 + .../storage_transfer_v1/gapic_version.py | 2 +- .../storage_transfer_service/async_client.py | 12 +- .../storage_transfer_service/client.py | 8 +- .../transports/rest.py | 6 +- .../storage_transfer_v1/types/__init__.py | 2 + .../types/transfer_types.py | 162 ++++++++++++------ ...et_metadata_google.storagetransfer.v1.json | 2 +- .../test_storage_transfer_service.py | 145 +++++++++------- 11 files changed, 213 insertions(+), 132 deletions(-) diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/__init__.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/__init__.py index 877799df7355..1a2c93651b70 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/__init__.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/__init__.py @@ -54,6 +54,7 @@ EventStream, GcsData, GoogleServiceAccount, + HdfsData, HttpData, LoggingConfig, MetadataOptions, @@ -100,6 +101,7 @@ "EventStream", "GcsData", "GoogleServiceAccount", + "HdfsData", "HttpData", "LoggingConfig", "MetadataOptions", diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/__init__.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/__init__.py index 5a710e1bb368..ceffe4431dab 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/__init__.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/__init__.py @@ -52,6 +52,7 @@ EventStream, GcsData, GoogleServiceAccount, + HdfsData, HttpData, LoggingConfig, MetadataOptions, @@ -88,6 +89,7 @@ "GetGoogleServiceAccountRequest", "GetTransferJobRequest", "GoogleServiceAccount", + "HdfsData", "HttpData", "ListAgentPoolsRequest", "ListAgentPoolsResponse", diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/async_client.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/async_client.py index e4baee85332b..67a7eb4cde3b 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/async_client.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StorageTransferServiceClient).get_transport_class, - type(StorageTransferServiceClient), - ) + get_transport_class = StorageTransferServiceClient.get_transport_class def __init__( self, @@ -1137,7 +1133,7 @@ async def sample_create_agent_pool(): Returns: google.cloud.storage_transfer_v1.types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1274,7 +1270,7 @@ async def sample_update_agent_pool(): Returns: google.cloud.storage_transfer_v1.types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1382,7 +1378,7 @@ async def sample_get_agent_pool(): Returns: google.cloud.storage_transfer_v1.types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/client.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/client.py index e86e95d9e4f7..f07c89efe0e2 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/client.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[StorageTransferServiceTransport], Callable[..., StorageTransferServiceTransport], ] = ( - type(self).get_transport_class(transport) + StorageTransferServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StorageTransferServiceTransport], transport) ) @@ -1538,7 +1538,7 @@ def sample_create_agent_pool(): Returns: google.cloud.storage_transfer_v1.types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1672,7 +1672,7 @@ def sample_update_agent_pool(): Returns: google.cloud.storage_transfer_v1.types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1777,7 +1777,7 @@ def sample_get_agent_pool(): Returns: google.cloud.storage_transfer_v1.types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/transports/rest.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/transports/rest.py index ea8f006ab3fc..4f2c0cefad9f 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/transports/rest.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/services/storage_transfer_service/transports/rest.py @@ -702,7 +702,7 @@ def __call__( Returns: ~.transfer_types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ http_options: List[Dict[str, str]] = [ @@ -1047,7 +1047,7 @@ def __call__( Returns: ~.transfer_types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. """ http_options: List[Dict[str, str]] = [ @@ -1753,7 +1753,7 @@ def __call__( Returns: ~.transfer_types.AgentPool: - Represents an On-Premises Agent pool. + Represents an agent pool. 
""" http_options: List[Dict[str, str]] = [ diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/__init__.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/__init__.py index a2cae19cf39f..214350808552 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/__init__.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/__init__.py @@ -43,6 +43,7 @@ EventStream, GcsData, GoogleServiceAccount, + HdfsData, HttpData, LoggingConfig, MetadataOptions, @@ -87,6 +88,7 @@ "EventStream", "GcsData", "GoogleServiceAccount", + "HdfsData", "HttpData", "LoggingConfig", "MetadataOptions", diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/transfer_types.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/transfer_types.py index f870b380fc1a..b398ef85c01f 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/transfer_types.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/types/transfer_types.py @@ -36,6 +36,7 @@ "AzureBlobStorageData", "HttpData", "PosixFilesystem", + "HdfsData", "AwsS3CompatibleData", "S3CompatibleMetadata", "AgentPool", @@ -305,6 +306,20 @@ class GcsData(proto.Message): The root path value must meet `Object Name Requirements `__. + managed_folder_transfer_enabled (bool): + Preview. Enables the transfer of managed folders between + Cloud Storage buckets. Set this option on the + gcs_data_source. + + If set to true: + + - Managed folders in the source bucket are transferred to + the destination bucket. + - Managed folders in the destination bucket are + overwritten. Other OVERWRITE options are not supported. + + See `Transfer Cloud Storage managed + folders `__. """ bucket_name: str = proto.Field( @@ -315,6 +330,10 @@ class GcsData(proto.Message): proto.STRING, number=3, ) + managed_folder_transfer_enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) class AwsS3Data(proto.Message): @@ -322,6 +341,9 @@ class AwsS3Data(proto.Message): sink. In an AwsS3Data resource, an object's name is the S3 object's key name. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: bucket_name (str): Required. S3 Bucket name (see `Creating a @@ -353,34 +375,44 @@ class AwsS3Data(proto.Message): using the [GoogleServiceAccount][google.storagetransfer.v1.GoogleServiceAccount] for this project. + cloudfront_domain (str): + Optional. The CloudFront distribution domain name pointing + to this bucket, to use when fetching. + + See `Transfer from S3 via + CloudFront `__ + for more information. + + Format: ``https://{id}.cloudfront.net`` or any valid custom + domain. Must begin with ``https://``. credentials_secret (str): Optional. The Resource name of a secret in Secret Manager. - The Azure SAS token must be stored in Secret Manager in JSON + AWS credentials must be stored in Secret Manager in JSON format: - .. raw:: html - -
-                <pre>{
-                 "sas_token" : "SAS_TOKEN"
-                }</pre>
+ { "access_key_id": "ACCESS_KEY_ID", "secret_access_key": + "SECRET_ACCESS_KEY" } [GoogleServiceAccount][google.storagetransfer.v1.GoogleServiceAccount] must be granted ``roles/secretmanager.secretAccessor`` for the resource. - See [Configure access to a source: Microsoft Azure Blob - Storage] - (https://cloud.google.com/storage-transfer/docs/source-microsoft-azure#secret_manager) + See [Configure access to a source: Amazon S3] + (https://cloud.google.com/storage-transfer/docs/source-amazon-s3#secret_manager) for more information. If ``credentials_secret`` is specified, do not specify - [azure_credentials][]. - - This feature is in - `preview `__. + [role_arn][google.storagetransfer.v1.AwsS3Data.role_arn] or + [aws_access_key][google.storagetransfer.v1.AwsS3Data.aws_access_key]. Format: ``projects/{project_number}/secrets/{secret_name}`` + managed_private_network (bool): + Egress bytes over a Google-managed private + network. This network is shared between other + users of Storage Transfer Service. + + This field is a member of `oneof`_ ``private_network``. """ bucket_name: str = proto.Field( @@ -400,10 +432,19 @@ class AwsS3Data(proto.Message): proto.STRING, number=4, ) + cloudfront_domain: str = proto.Field( + proto.STRING, + number=6, + ) credentials_secret: str = proto.Field( proto.STRING, number=7, ) + managed_private_network: bool = proto.Field( + proto.BOOL, + number=8, + oneof="private_network", + ) class AzureBlobStorageData(proto.Message): @@ -442,11 +483,7 @@ class AzureBlobStorageData(proto.Message): The Azure SAS token must be stored in Secret Manager in JSON format: - .. raw:: html - -
-                <pre>{
-                 "sas_token" : "SAS_TOKEN"
-                }</pre>
+ { "sas_token" : "SAS_TOKEN" } [GoogleServiceAccount][google.storagetransfer.v1.GoogleServiceAccount] must be granted ``roles/secretmanager.secretAccessor`` for @@ -460,9 +497,6 @@ class AzureBlobStorageData(proto.Message): If ``credentials_secret`` is specified, do not specify [azure_credentials][google.storagetransfer.v1.AzureBlobStorageData.azure_credentials]. - This feature is in - `preview `__. - Format: ``projects/{project_number}/secrets/{secret_name}`` """ @@ -559,6 +593,24 @@ class PosixFilesystem(proto.Message): ) +class HdfsData(proto.Message): + r"""An HdfsData resource specifies a path within an HDFS entity + (e.g. a cluster). All cluster-specific settings, such as + namenodes and ports, are configured on the transfer agents + servicing requests, so HdfsData only contains the root path to + the data in our transfer. + + Attributes: + path (str): + Root path to transfer files. + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + class AwsS3CompatibleData(proto.Message): r"""An AwsS3CompatibleData resource. @@ -727,7 +779,7 @@ class ListApi(proto.Enum): class AgentPool(proto.Message): - r"""Represents an On-Premises Agent pool. + r"""Represents an agent pool. Attributes: name (str): @@ -755,8 +807,8 @@ class State(proto.Enum): Default value. This value is unused. CREATING (1): This is an initialization state. During this - stage, the resources such as Pub/Sub topics are - allocated for the AgentPool. + stage, resources are allocated for the + AgentPool. CREATED (2): Determines that the AgentPool is created for use. At this state, Agents can join the @@ -933,6 +985,10 @@ class TransferSpec(proto.Message): aws_s3_compatible_data_source (google.cloud.storage_transfer_v1.types.AwsS3CompatibleData): An AWS S3 compatible data source. + This field is a member of `oneof`_ ``data_source``. + hdfs_data_source (google.cloud.storage_transfer_v1.types.HdfsData): + An HDFS cluster data source. + This field is a member of `oneof`_ ``data_source``. gcs_intermediate_data_location (google.cloud.storage_transfer_v1.types.GcsData): For transfers between file systems, specifies a Cloud @@ -1020,6 +1076,12 @@ class TransferSpec(proto.Message): oneof="data_source", message="AwsS3CompatibleData", ) + hdfs_data_source: "HdfsData" = proto.Field( + proto.MESSAGE, + number=20, + oneof="data_source", + message="HdfsData", + ) gcs_intermediate_data_location: "GcsData" = proto.Field( proto.MESSAGE, number=16, @@ -1102,9 +1164,12 @@ class MetadataOptions(proto.Message): [KMS_KEY_DESTINATION_BUCKET_DEFAULT][google.storagetransfer.v1.MetadataOptions.KmsKey.KMS_KEY_DESTINATION_BUCKET_DEFAULT]. time_created (google.cloud.storage_transfer_v1.types.MetadataOptions.TimeCreated): Specifies how each object's ``timeCreated`` metadata is - preserved for transfers between Google Cloud Storage - buckets. If unspecified, the default behavior is the same as + preserved for transfers. If unspecified, the default + behavior is the same as [TIME_CREATED_SKIP][google.storagetransfer.v1.MetadataOptions.TimeCreated.TIME_CREATED_SKIP]. + This behavior is supported for transfers to Cloud Storage + buckets from Cloud Storage, Amazon S3, S3-compatible + storage, and Azure sources. """ class Symlink(proto.Enum): @@ -1273,10 +1338,11 @@ class TimeCreated(proto.Enum): Do not preserve the ``timeCreated`` metadata from the source object. TIME_CREATED_PRESERVE_AS_CUSTOM_TIME (2): - Preserves the source object's ``timeCreated`` metadata in - the ``customTime`` field in the destination object. 
Note - that any value stored in the source object's ``customTime`` - field will not be propagated to the destination object. + Preserves the source object's ``timeCreated`` or + ``lastModified`` metadata in the ``customTime`` field in the + destination object. Note that any value stored in the source + object's ``customTime`` field will not be propagated to the + destination object. """ TIME_CREATED_UNSPECIFIED = 0 TIME_CREATED_SKIP = 1 @@ -1539,9 +1605,7 @@ class TransferJob(proto.Message): transfer_spec (google.cloud.storage_transfer_v1.types.TransferSpec): Transfer specification. notification_config (google.cloud.storage_transfer_v1.types.NotificationConfig): - Notification configuration. This is not - supported for transfers involving - PosixFilesystem. + Notification configuration. logging_config (google.cloud.storage_transfer_v1.types.LoggingConfig): Logging configuration. schedule (google.cloud.storage_transfer_v1.types.Schedule): @@ -2000,34 +2064,26 @@ class PayloadFormat(proto.Enum): class LoggingConfig(proto.Message): r"""Specifies the logging behavior for transfer operations. - For cloud-to-cloud transfers, logs are sent to Cloud Logging. See - `Read transfer + Logs can be sent to Cloud Logging for all transfer types. See `Read + transfer logs `__ for details. - For transfers to or from a POSIX file system, logs are stored in the - Cloud Storage bucket that is the source or sink of the transfer. See - [Managing Transfer for on-premises jobs] - (https://cloud.google.com/storage-transfer/docs/managing-on-prem-jobs#viewing-logs) - for details. - Attributes: log_actions (MutableSequence[google.cloud.storage_transfer_v1.types.LoggingConfig.LoggableAction]): - Specifies the actions to be logged. If empty, no logs are - generated. Not supported for transfers with PosixFilesystem - data sources; use - [enable_onprem_gcs_transfer_logs][google.storagetransfer.v1.LoggingConfig.enable_onprem_gcs_transfer_logs] - instead. + Specifies the actions to be logged. If empty, + no logs are generated. log_action_states (MutableSequence[google.cloud.storage_transfer_v1.types.LoggingConfig.LoggableActionState]): States in which ``log_actions`` are logged. If empty, no - logs are generated. Not supported for transfers with - PosixFilesystem data sources; use - [enable_onprem_gcs_transfer_logs][google.storagetransfer.v1.LoggingConfig.enable_onprem_gcs_transfer_logs] - instead. + logs are generated. enable_onprem_gcs_transfer_logs (bool): - For transfers with a PosixFilesystem source, - this option enables the Cloud Storage transfer - logs for this transfer. + For PosixFilesystem transfers, enables `file system transfer + logs `__ + instead of, or in addition to, Cloud Logging. + + This option ignores [LoggableAction] and + [LoggableActionState]. If these are set, Cloud Logging will + also be enabled for this transfer. 
""" class LoggableAction(proto.Enum): diff --git a/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json b/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json index b8dbf3073eb4..f3c5ac4b04c5 100644 --- a/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json +++ b/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-storage-transfer", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py b/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py index ba51339e7f4f..64d7f75e4db5 100644 --- a/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py +++ b/packages/google-cloud-storage-transfer/tests/unit/gapic/storage_transfer_v1/test_storage_transfer_service.py @@ -1385,22 +1385,23 @@ async def test_get_google_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_google_service_account - ] = mock_object + ] = mock_rpc request = {} await client.get_google_service_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_google_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1696,22 +1697,23 @@ async def test_create_transfer_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_transfer_job - ] = mock_object + ] = mock_rpc request = {} await client.create_transfer_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_transfer_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1953,22 +1955,23 @@ async def test_update_transfer_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_transfer_job - ] = mock_object + ] = mock_rpc request = {} await client.update_transfer_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_transfer_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2265,22 +2268,23 @@ async def test_get_transfer_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transfer_job - ] = mock_object + ] = mock_rpc request = {} await client.get_transfer_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transfer_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2569,22 +2573,23 @@ async def test_list_transfer_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transfer_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_transfer_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transfer_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2997,22 +3002,23 @@ async def test_pause_transfer_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_transfer_operation - ] = mock_object + ] = mock_rpc request = {} await client.pause_transfer_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_transfer_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3284,22 +3290,23 @@ async def test_resume_transfer_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_transfer_operation - ] = mock_object + ] = mock_rpc request = {} await client.resume_transfer_operation(request) # Establish that the underlying gRPC stub method was called. 
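The ``mock_object`` to ``mock_rpc`` rename (with an explicit synchronous ``return_value``) repeats throughout these tests. A self-contained sketch of the pattern, using a stand-in transport instead of the real generated client, looks roughly like this:

.. code-block:: python

    import asyncio
    from unittest import mock


    async def main():
        # Stand-in for a transport whose wrapped RPCs are cached in a dict
        # keyed by the bound transport method.
        transport = mock.Mock()
        transport._wrapped_methods = {}

        # Replace the cached wrapped function with an AsyncMock whose awaited
        # result is a plain Mock, mirroring the generated tests above.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        transport._wrapped_methods[transport.get_transfer_job] = mock_rpc

        # Two calls through the cache should hit the same mock twice.
        await transport._wrapped_methods[transport.get_transfer_job]({})
        await transport._wrapped_methods[transport.get_transfer_job]({})
        assert mock_rpc.call_count == 2


    asyncio.run(main())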
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_transfer_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3514,8 +3521,9 @@ def test_run_transfer_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_transfer_job(request) @@ -3569,26 +3577,28 @@ async def test_run_transfer_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_transfer_job - ] = mock_object + ] = mock_rpc request = {} await client.run_transfer_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_transfer_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3859,22 +3869,23 @@ async def test_delete_transfer_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_transfer_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_transfer_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_transfer_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4158,22 +4169,23 @@ async def test_create_agent_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_agent_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_agent_pool(request) # Establish that the underlying gRPC stub method was called. 
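The reworded comment above distinguishes the cached ``operations_client`` wrapper from the per-method RPC cache; from a caller's perspective, ``run_transfer_job`` is simply a long-running operation. A hedged sketch of invoking it follows; the job name, project ID, and request fields shown are assumptions for illustration, not values from this patch.

.. code-block:: python

    from google.cloud import storage_transfer_v1

    client = storage_transfer_v1.StorageTransferServiceClient()

    # run_transfer_job returns a google.api_core long-running operation;
    # result() blocks until the transfer operation has been started.
    operation = client.run_transfer_job(
        request=storage_transfer_v1.RunTransferJobRequest(
            job_name="transferJobs/1234567890",  # placeholder
            project_id="my-project",  # placeholder
        )
    )
    operation.result()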
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_agent_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4568,22 +4580,23 @@ async def test_update_agent_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_agent_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_agent_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_agent_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4962,22 +4975,23 @@ async def test_get_agent_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_agent_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_agent_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_agent_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5336,22 +5350,23 @@ async def test_list_agent_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_agent_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_agent_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_agent_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5897,22 +5912,23 @@ async def test_delete_agent_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_agent_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_agent_pool(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_agent_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6373,7 +6389,11 @@ def test_create_transfer_job_rest(request_type): "description": "description_value", "project_id": "project_id_value", "transfer_spec": { - "gcs_data_sink": {"bucket_name": "bucket_name_value", "path": "path_value"}, + "gcs_data_sink": { + "bucket_name": "bucket_name_value", + "path": "path_value", + "managed_folder_transfer_enabled": True, + }, "posix_data_sink": {"root_directory": "root_directory_value"}, "gcs_data_source": {}, "aws_s3_data_source": { @@ -6384,7 +6404,9 @@ def test_create_transfer_job_rest(request_type): }, "path": "path_value", "role_arn": "role_arn_value", + "cloudfront_domain": "cloudfront_domain_value", "credentials_secret": "credentials_secret_value", + "managed_private_network": True, }, "http_data_source": {"list_url": "list_url_value"}, "posix_data_source": {}, @@ -6407,6 +6429,7 @@ def test_create_transfer_job_rest(request_type): "list_api": 1, }, }, + "hdfs_data_source": {"path": "path_value"}, "gcs_intermediate_data_location": {}, "object_conditions": { "min_time_elapsed_since_last_modification": { From fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:06:13 -0400 Subject: [PATCH 008/108] docs: [google-ai-generativelanguage] Many small fixes (#13017) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE docs: Many small fixes feat: Add new PromptFeedback and FinishReason entries feat: Add model max_temperature feat: Add new PromptFeedback and FinishReason entries for https://github.com/google-gemini/generative-ai-python/issues/476 END_COMMIT_OVERRIDE PiperOrigin-RevId: 663936564 Source-Link: https://github.com/googleapis/googleapis/commit/21c206f7370ed960b1c00418cb1edbdb81e3f999 Source-Link: https://github.com/googleapis/googleapis-gen/commit/97ac6dfcaa79ef76bb78f860e7986adcbe223081 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiOTdhYzZkZmNhYTc5ZWY3NmJiNzhmODYwZTc5ODZhZGNiZTIyMzA4MSJ9 docs: Many small fixes PiperOrigin-RevId: 663936518 Source-Link: https://github.com/googleapis/googleapis/commit/5157b5f45590d4695e99e9f4a76cdc5c54077472 Source-Link: https://github.com/googleapis/googleapis-gen/commit/740787c5eb4f2dcfb9094a1183e0314c725fb0ca Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiNzQwNzg3YzVlYjRmMmRjZmI5MDk0YTExODNlMDMxNGM3MjVmYjBjYSJ9 --------- Co-authored-by: Owl Bot --- .../ai/generativelanguage/gapic_version.py | 2 +- .../ai/generativelanguage_v1/gapic_version.py | 2 +- .../generative_service/async_client.py | 107 +++---- .../services/generative_service/client.py | 104 ++++--- .../generative_service/transports/grpc.py | 38 ++- .../transports/grpc_asyncio.py | 38 ++- .../generative_service/transports/rest.py | 38 +-- .../services/model_service/async_client.py | 23 +- .../services/model_service/client.py | 20 +- .../services/model_service/transports/grpc.py | 11 +- .../model_service/transports/grpc_asyncio.py | 11 +- .../types/generative_service.py | 169 +++++++---- .../ai/generativelanguage_v1/types/model.py | 42 ++- .../types/model_service.py | 7 +- .../gapic_version.py | 2 +- 
.../services/cache_service/async_client.py | 5 +- .../services/cache_service/client.py | 2 +- .../services/discuss_service/async_client.py | 5 +- .../services/discuss_service/client.py | 2 +- .../services/file_service/async_client.py | 5 +- .../services/file_service/client.py | 2 +- .../generative_service/async_client.py | 123 ++++---- .../services/generative_service/client.py | 120 ++++---- .../generative_service/transports/grpc.py | 38 ++- .../transports/grpc_asyncio.py | 38 ++- .../generative_service/transports/rest.py | 42 +-- .../services/model_service/async_client.py | 36 +-- .../services/model_service/client.py | 33 ++- .../services/model_service/transports/grpc.py | 22 +- .../model_service/transports/grpc_asyncio.py | 22 +- .../permission_service/async_client.py | 5 +- .../services/permission_service/client.py | 2 +- .../retriever_service/async_client.py | 5 +- .../services/retriever_service/client.py | 2 +- .../services/text_service/async_client.py | 5 +- .../services/text_service/client.py | 2 +- .../types/content.py | 8 + .../types/generative_service.py | 280 +++++++++++------- .../generativelanguage_v1beta/types/model.py | 21 +- .../types/model_service.py | 9 +- .../types/tuned_model.py | 11 +- .../gapic_version.py | 2 +- .../services/discuss_service/async_client.py | 5 +- .../services/discuss_service/client.py | 2 +- .../services/model_service/async_client.py | 5 +- .../services/model_service/client.py | 2 +- .../services/text_service/async_client.py | 5 +- .../services/text_service/client.py | 2 +- .../gapic_version.py | 2 +- .../services/discuss_service/async_client.py | 5 +- .../services/discuss_service/client.py | 2 +- .../services/model_service/async_client.py | 5 +- .../services/model_service/client.py | 2 +- .../permission_service/async_client.py | 5 +- .../services/permission_service/client.py | 2 +- .../permission_service/transports/rest.py | 18 +- .../services/text_service/async_client.py | 5 +- .../services/text_service/client.py | 2 +- ...adata_google.ai.generativelanguage.v1.json | 2 +- ...a_google.ai.generativelanguage.v1beta.json | 2 +- ..._google.ai.generativelanguage.v1beta2.json | 2 +- ..._google.ai.generativelanguage.v1beta3.json | 2 +- .../test_generative_service.py | 45 +-- .../test_model_service.py | 25 +- .../test_cache_service.py | 47 +-- .../test_discuss_service.py | 18 +- .../test_file_service.py | 36 ++- .../test_generative_service.py | 54 ++-- .../test_model_service.py | 73 +++-- .../test_permission_service.py | 54 ++-- .../test_retriever_service.py | 180 ++++++----- .../test_text_service.py | 36 ++- .../test_discuss_service.py | 18 +- .../test_model_service.py | 18 +- .../test_text_service.py | 18 +- .../test_discuss_service.py | 18 +- .../test_model_service.py | 73 +++-- .../test_permission_service.py | 54 ++-- .../test_text_service.py | 36 ++- 79 files changed, 1319 insertions(+), 1022 deletions(-) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py index 7f380977c2b7..2fa8d2f13e5e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GenerativeServiceClient).get_transport_class, type(GenerativeServiceClient) - ) + get_transport_class = GenerativeServiceClient.get_transport_class def __init__( self, @@ -280,14 +277,15 @@ async def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -329,12 +327,14 @@ async def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -347,18 +347,18 @@ async def sample_generate_content(): Returns: google.ai.generativelanguage_v1.types.GenerateContentResponse: - Response from the model supporting multiple candidates. 
+ Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -421,8 +421,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[generative_service.GenerateContentResponse]]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. code-block:: python @@ -465,12 +466,14 @@ async def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -483,18 +486,18 @@ async def sample_stream_generate_content(): Returns: AsyncIterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. 
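The reworded docstring above exposes ``model`` and ``contents`` as flattened keyword arguments; a minimal illustrative call against the v1 async client (the model name and prompt are placeholders) might look like:

.. code-block:: python

    import asyncio

    from google.ai import generativelanguage_v1


    async def main():
        client = generativelanguage_v1.GenerativeServiceAsyncClient()

        response = await client.generate_content(
            model="models/gemini-1.5-flash",  # placeholder model name
            contents=[
                generativelanguage_v1.Content(
                    parts=[generativelanguage_v1.Part(text="Explain tokens in one sentence.")]
                )
            ],
        )

        # Either all requested candidates come back or none at all; check
        # prompt_feedback when no candidates are returned.
        for candidate in response.candidates:
            print(candidate.finish_reason, candidate.content)


    asyncio.run(main())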
@@ -555,8 +558,9 @@ async def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. code-block:: python @@ -679,8 +683,9 @@ async def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -804,8 +809,10 @@ async def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py index 886dfd3c9953..c0d080604625 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py @@ -664,7 +664,7 @@ def __init__( Type[GenerativeServiceTransport], Callable[..., GenerativeServiceTransport], ] = ( - type(self).get_transport_class(transport) + GenerativeServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GenerativeServiceTransport], transport) ) @@ -693,14 +693,15 @@ def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -742,12 +743,14 @@ def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. 
For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -760,18 +763,18 @@ def sample_generate_content(): Returns: google.ai.generativelanguage_v1.types.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -831,8 +834,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[generative_service.GenerateContentResponse]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. code-block:: python @@ -875,12 +879,14 @@ def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -893,18 +899,18 @@ def sample_stream_generate_content(): Returns: Iterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. 
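A sketch of consuming the streamed variant with the synchronous client, where each yielded chunk is a partial ``GenerateContentResponse`` (the model name and prompt are placeholders):

.. code-block:: python

    from google.ai import generativelanguage_v1

    client = generativelanguage_v1.GenerativeServiceClient()

    stream = client.stream_generate_content(
        model="models/gemini-1.5-flash",  # placeholder
        contents=[
            generativelanguage_v1.Content(
                parts=[generativelanguage_v1.Part(text="Write a haiku about tokens.")]
            )
        ],
    )
    for chunk in stream:
        for candidate in chunk.candidates:
            print(candidate.content)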
+ API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -962,8 +968,9 @@ def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. code-block:: python @@ -1083,8 +1090,9 @@ def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -1205,8 +1213,10 @@ def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py index 7fa405324e45..0c05a0b60ad9 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py @@ -245,14 +245,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. - Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -281,8 +282,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. Returns: Callable[[~.GenerateContentRequest], @@ -311,8 +313,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. 
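For the embedding method described above, an illustrative request follows; the embedding model name is a placeholder, and the request object is passed explicitly rather than through flattened arguments.

.. code-block:: python

    from google.ai import generativelanguage_v1

    client = generativelanguage_v1.GenerativeServiceClient()

    response = client.embed_content(
        request=generativelanguage_v1.EmbedContentRequest(
            model="models/text-embedding-004",  # placeholder embedding model
            content=generativelanguage_v1.Content(
                parts=[generativelanguage_v1.Part(text="hello world")]
            ),
        )
    )
    print(len(response.embedding.values))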
Returns: Callable[[~.EmbedContentRequest], @@ -341,8 +344,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. Returns: Callable[[~.BatchEmbedContentsRequest], @@ -370,8 +374,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py index bc85a9437627..20323a521f71 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py @@ -249,14 +249,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. - Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -285,8 +286,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. Returns: Callable[[~.GenerateContentRequest], @@ -315,8 +317,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. Returns: Callable[[~.EmbedContentRequest], @@ -345,8 +348,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. Returns: Callable[[~.BatchEmbedContentsRequest], @@ -375,8 +379,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. 
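Similarly, the token-counting method above can be exercised with a plain ``CountTokensRequest``; ``total_tokens`` is always non-negative per the updated docstring (the model name and prompt are placeholders):

.. code-block:: python

    from google.ai import generativelanguage_v1

    client = generativelanguage_v1.GenerativeServiceClient()

    response = client.count_tokens(
        request=generativelanguage_v1.CountTokensRequest(
            model="models/gemini-1.5-flash",  # placeholder
            contents=[
                generativelanguage_v1.Content(
                    parts=[generativelanguage_v1.Part(text="How many tokens is this?")]
                )
            ],
        )
    )
    print(response.total_tokens)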
Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py index b604bc06b024..7498cba8d7e3 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py @@ -715,20 +715,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. """ @@ -830,20 +831,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. """ diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py index 9eaa670055fb..8d69bc5d2451 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, @@ -266,7 +263,12 @@ async def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. code-block:: python @@ -377,7 +379,9 @@ async def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsAsyncPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. code-block:: python @@ -411,10 +415,9 @@ async def sample_list_models(): page_size (:class:`int`): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. This method - returns at most 1000 models per page, even if you pass a - larger page_size. + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py index ffe27c694223..115407649578 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py @@ -652,7 +652,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) @@ -678,7 +678,12 @@ def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. code-block:: python @@ -786,7 +791,9 @@ def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. code-block:: python @@ -820,10 +827,9 @@ def sample_list_models(): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. 
This method - returns at most 1000 models per page, even if you pass a - larger page_size. + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py index cd57836b737c..234ee1693ebf 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py @@ -240,7 +240,12 @@ def grpc_channel(self) -> grpc.Channel: def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. Returns: Callable[[~.GetModelRequest], @@ -266,7 +271,9 @@ def list_models( ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. Returns: Callable[[~.ListModelsRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py index f1569dd307a5..c7c11c694671 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py @@ -246,7 +246,12 @@ def get_model( ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. Returns: Callable[[~.GetModelRequest], @@ -274,7 +279,9 @@ def list_models( ]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. Returns: Callable[[~.ListModelsRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py index e1121ef229db..e19c5b166abc 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -93,12 +93,14 @@ class GenerateContentRequest(proto.Message): Format: ``name=models/{model}``. 
contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a single - instance. For multi-turn queries, this is a - repeated field that contains conversation - history + latest request. + Required. The content of the current conversation with the + model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. safety_settings (MutableSequence[google.ai.generativelanguage_v1.types.SafetySetting]): Optional. A list of unique ``SafetySetting`` instances for blocking unsafe content. @@ -116,7 +118,13 @@ class GenerateContentRequest(proto.Message): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. + are supported. Refer to the + `guide `__ + for detailed information on available safety settings. Also + refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in your AI + applications. generation_config (google.ai.generativelanguage_v1.types.GenerationConfig): Optional. Configuration options for model generation and outputs. @@ -148,7 +156,7 @@ class GenerateContentRequest(proto.Message): class GenerationConfig(proto.Message): r"""Configuration options for model generation and outputs. Not - all parameters may be configurable for every model. + all parameters are configurable for every model. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -162,14 +170,13 @@ class GenerationConfig(proto.Message): This field is a member of `oneof`_ ``_candidate_count``. stop_sequences (MutableSequence[str]): - Optional. The set of character sequences (up - to 5) that will stop output generation. If - specified, the API will stop at the first - appearance of a stop sequence. The stop sequence - will not be included as part of the response. + Optional. The set of character sequences (up to 5) that will + stop output generation. If specified, the API will stop at + the first appearance of a ``stop_sequence``. The stop + sequence will not be included as part of the response. max_output_tokens (int): Optional. The maximum number of tokens to include in a - candidate. + response candidate. Note: The default value varies by model, see the ``Model.output_token_limit`` attribute of the ``Model`` @@ -190,33 +197,35 @@ class GenerationConfig(proto.Message): Optional. The maximum cumulative probability of tokens to consider when sampling. - The model uses combined Top-k and nucleus sampling. + The model uses combined Top-k and Top-p (nucleus) sampling. Tokens are sorted based on their assigned probabilities so that only the most likely tokens are considered. Top-k sampling directly limits the maximum number of tokens to - consider, while Nucleus sampling limits number of tokens + consider, while Nucleus sampling limits the number of tokens based on the cumulative probability. - Note: The default value varies by model, see the - ``Model.top_p`` attribute of the ``Model`` returned from the - ``getModel`` function. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. 
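Pulling together the generation knobs documented in this message, an illustrative ``GenerationConfig`` attached to a request is sketched below; all numeric values are arbitrary examples, not recommendations from this patch, and defaults vary per model as the docstring notes.

.. code-block:: python

    from google.ai import generativelanguage_v1

    generation_config = generativelanguage_v1.GenerationConfig(
        candidate_count=1,
        stop_sequences=["\n\n"],
        max_output_tokens=256,
        temperature=0.7,
        top_p=0.95,
        top_k=40,
    )

    request = generativelanguage_v1.GenerateContentRequest(
        model="models/gemini-1.5-flash",  # placeholder
        contents=[
            generativelanguage_v1.Content(
                parts=[generativelanguage_v1.Part(text="Summarize the change above.")]
            )
        ],
        generation_config=generation_config,
    )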
This field is a member of `oneof`_ ``_top_p``. top_k (int): Optional. The maximum number of tokens to consider when sampling. - Models use nucleus sampling or combined Top-k and nucleus - sampling. Top-k sampling considers the set of ``top_k`` most - probable tokens. Models running with nucleus sampling don't - allow top_k setting. + Gemini models use Top-p (nucleus) sampling or a combination + of Top-k and nucleus sampling. Top-k sampling considers the + set of ``top_k`` most probable tokens. Models running with + nucleus sampling don't allow top_k setting. - Note: The default value varies by model, see the - ``Model.top_k`` attribute of the ``Model`` returned from the - ``getModel`` function. Empty ``top_k`` field in ``Model`` - indicates the model doesn't apply top-k sampling and doesn't - allow setting ``top_k`` on requests. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_k``. """ @@ -253,18 +262,16 @@ class GenerationConfig(proto.Message): class GenerateContentResponse(proto.Message): - r"""Response from the model supporting multiple candidates. - - Note on safety ratings and content filtering. They are reported for - both prompt in ``GenerateContentResponse.prompt_feedback`` and for - each candidate in ``finish_reason`` and in ``safety_ratings``. The - API contract is that: - - - either all requested candidates are returned or no candidates at - all - - no candidates are returned only if there was something wrong with - the prompt (see ``prompt_feedback``) - - feedback on each candidate is reported on ``finish_reason`` and + r"""Response from the model supporting multiple candidate responses. + + Safety ratings and content filtering are reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each candidate + in ``finish_reason`` and in ``safety_ratings``. The API: + + - Returns either all requested candidates or none of them + - Returns no candidates at all only if there was something wrong + with the prompt (check ``prompt_feedback``) + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. Attributes: @@ -285,29 +292,35 @@ class PromptFeedback(proto.Message): Attributes: block_reason (google.ai.generativelanguage_v1.types.GenerateContentResponse.PromptFeedback.BlockReason): Optional. If set, the prompt was blocked and - no candidates are returned. Rephrase your - prompt. + no candidates are returned. Rephrase the prompt. safety_ratings (MutableSequence[google.ai.generativelanguage_v1.types.SafetyRating]): Ratings for safety of the prompt. There is at most one rating per category. """ class BlockReason(proto.Enum): - r"""Specifies what was the reason why prompt was blocked. + r"""Specifies the reason why the prompt was blocked. Values: BLOCK_REASON_UNSPECIFIED (0): Default value. This value is unused. SAFETY (1): - Prompt was blocked due to safety reasons. You can inspect + Prompt was blocked due to safety reasons. Inspect ``safety_ratings`` to understand which safety category blocked it. OTHER (2): - Prompt was blocked due to unknown reaasons. + Prompt was blocked due to unknown reasons. + BLOCKLIST (3): + Prompt was blocked due to the terms which are + included from the terminology blocklist. 
+ PROHIBITED_CONTENT (4): + Prompt was blocked due to prohibited content. """ BLOCK_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 + BLOCKLIST = 3 + PROHIBITED_CONTENT = 4 block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( proto.Field( @@ -327,13 +340,15 @@ class UsageMetadata(proto.Message): Attributes: prompt_token_count (int): - Number of tokens in the prompt. + Number of tokens in the prompt. When ``cached_content`` is + set, this is still the total effective prompt size meaning + this includes the number of tokens in the cached content. candidates_token_count (int): - Total number of tokens across the generated - candidates. + Total number of tokens across all the + generated response candidates. total_token_count (int): Total token count for the generation request - (prompt + candidates). + (prompt + response candidates). """ prompt_token_count: int = proto.Field( @@ -374,7 +389,7 @@ class Candidate(proto.Message): Attributes: index (int): Output only. Index of the candidate in the - list of candidates. + list of response candidates. This field is a member of `oneof`_ ``_index``. content (google.ai.generativelanguage_v1.types.Content): @@ -384,7 +399,7 @@ class Candidate(proto.Message): Optional. Output only. The reason why the model stopped generating tokens. If empty, the model has not stopped generating - the tokens. + tokens. safety_ratings (MutableSequence[google.ai.generativelanguage_v1.types.SafetyRating]): List of ratings for the safety of a response candidate. @@ -414,20 +429,41 @@ class FinishReason(proto.Enum): The maximum number of tokens as specified in the request was reached. SAFETY (3): - The candidate content was flagged for safety - reasons. + The response candidate content was flagged + for safety reasons. RECITATION (4): - The candidate content was flagged for - recitation reasons. + The response candidate content was flagged + for recitation reasons. + LANGUAGE (6): + The response candidate content was flagged + for using an unsupported language. OTHER (5): Unknown reason. + BLOCKLIST (7): + Token generation stopped because the content + contains forbidden terms. + PROHIBITED_CONTENT (8): + Token generation stopped for potentially + containing prohibited content. + SPII (9): + Token generation stopped because the content + potentially contains Sensitive Personally + Identifiable Information (SPII). + MALFORMED_FUNCTION_CALL (10): + The function call generated by the model is + invalid. """ FINISH_REASON_UNSPECIFIED = 0 STOP = 1 MAX_TOKENS = 2 SAFETY = 3 RECITATION = 4 + LANGUAGE = 6 OTHER = 5 + BLOCKLIST = 7 + PROHIBITED_CONTENT = 8 + SPII = 9 + MALFORMED_FUNCTION_CALL = 10 index: int = proto.Field( proto.INT32, @@ -494,8 +530,8 @@ class EmbedContentRequest(proto.Message): Optional. Optional reduced dimension for the output embedding. If set, excessive values in the output embedding are truncated from the end. Supported by newer models since - 2024, and the earlier model (``models/embedding-001``) - cannot specify this value. + 2024 only. You cannot set this value if using the earlier + model (``models/embedding-001``). This field is a member of `oneof`_ ``_output_dimensionality``. """ @@ -623,9 +659,16 @@ class CountTokensRequest(proto.Message): Optional. The input given to the model as a prompt. This field is ignored when ``generate_content_request`` is set. generate_content_request (google.ai.generativelanguage_v1.types.GenerateContentRequest): - Optional. The overall input given to the - model. 
CountTokens will count prompt, function - calling, etc. + Optional. The overall input given to the ``Model``. This + includes the prompt as well as other model steering + information like `system + instructions `__, + and/or function declarations for `function + calling `__. + ``Model``\ s/\ ``Content``\ s and + ``generate_content_request``\ s are mutually exclusive. You + can either send ``Model`` + ``Content``\ s or a + ``generate_content_request``, but never both. """ model: str = proto.Field( @@ -651,10 +694,8 @@ class CountTokensResponse(proto.Message): Attributes: total_tokens (int): - The number of tokens that the ``model`` tokenizes the - ``prompt`` into. - - Always non-negative. + The number of tokens that the ``Model`` tokenizes the + ``prompt`` into. Always non-negative. """ total_tokens: int = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py index 587b5fb3d92b..ddcd4c24ccb6 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py @@ -34,7 +34,10 @@ class Model(proto.Message): Attributes: name (str): - Required. The resource name of the ``Model``. + Required. The resource name of the ``Model``. Refer to + `Model + variants `__ + for all allowed values. Format: ``models/{model}`` with a ``{model}`` naming convention of: @@ -43,21 +46,21 @@ class Model(proto.Message): Examples: - - ``models/chat-bison-001`` + - ``models/gemini-1.5-flash-001`` base_model_id (str): Required. The name of the base model, pass this to the generation request. Examples: - - ``chat-bison`` + - ``gemini-1.5-flash`` version (str): Required. The version number of the model. - This represents the major version + This represents the major version (``1.0`` or ``1.5``) display_name (str): The human-readable name of the model. E.g. - "Chat Bison". + "Gemini 1.5 Flash". The name can be up to 128 characters long and can consist of any UTF-8 characters. description (str): @@ -71,21 +74,27 @@ class Model(proto.Message): supported_generation_methods (MutableSequence[str]): The model's supported generation methods. - The method names are defined as Pascal case strings, such as - ``generateMessage`` which correspond to API methods. + The corresponding API method names are defined as Pascal + case strings, such as ``generateMessage`` and + ``generateContent``. temperature (float): Controls the randomness of the output. - Values can range over ``[0.0,1.0]``, inclusive. A value - closer to ``1.0`` will produce responses that are more - varied, while a value closer to ``0.0`` will typically - result in less surprising responses from the model. This - value specifies default to be used by the backend while - making the call to the model. + Values can range over ``[0.0,max_temperature]``, inclusive. + A higher value will produce responses that are more varied, + while a value closer to ``0.0`` will typically result in + less surprising responses from the model. This value + specifies default to be used by the backend while making the + call to the model. This field is a member of `oneof`_ ``_temperature``. + max_temperature (float): + The maximum temperature this model can use. + + This field is a member of `oneof`_ ``_max_temperature``. top_p (float): - For Nucleus sampling. + For `Nucleus + sampling `__. 
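A rough sketch of where these per-model defaults can be read back from, using ``GetModel``; the client setup and model name are assumptions for illustration only:

.. code-block:: python

    from google.ai import generativelanguage_v1 as glm

    client = glm.ModelServiceClient(client_options={"api_key": "YOUR_API_KEY"})  # placeholder key

    model = client.get_model(name="models/gemini-1.5-flash")  # assumed model name

    print(model.display_name, model.version)
    print("default temperature:", model.temperature)   # within [0.0, max_temperature]
    print("max temperature:", model.max_temperature)
    print("default top_p:", model.top_p)
    print("token limits:", model.input_token_limit, model.output_token_limit)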
Nucleus sampling considers the smallest set of tokens whose probability sum is at least ``top_p``. This value specifies @@ -142,6 +151,11 @@ class Model(proto.Message): number=9, optional=True, ) + max_temperature: float = proto.Field( + proto.FLOAT, + number=13, + optional=True, + ) top_p: float = proto.Field( proto.FLOAT, number=10, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py index 3fe406e5793a..d135709a3eb5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py @@ -57,10 +57,9 @@ class ListModelsRequest(proto.Message): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at most - 50 models will be returned per page. This method returns at - most 1000 models per page, even if you pass a larger - page_size. + If unspecified, 50 models will be returned per page. This + method returns at most 1000 models per page, even if you + pass a larger page_size. page_token (str): A page token, received from a previous ``ListModels`` call. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py index b9c5ece9451d..dd69486d09e4 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CacheServiceClient).get_transport_class, type(CacheServiceClient) - ) + get_transport_class = CacheServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py index cb851995d674..e5ce080ddfcb 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py @@ -678,7 +678,7 @@ def __init__( transport_init: Union[ Type[CacheServiceTransport], Callable[..., CacheServiceTransport] ] = ( - type(self).get_transport_class(transport) + CacheServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CacheServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py index 1fcd99ba5047..d0fbfd02378c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) - ) + get_transport_class = DiscussServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py index 59883d48f819..3e8160f99d4d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py @@ -658,7 +658,7 @@ def __init__( transport_init: Union[ Type[DiscussServiceTransport], Callable[..., DiscussServiceTransport] ] = ( - type(self).get_transport_class(transport) + DiscussServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiscussServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py index b1387e09fb59..07a7e21e9827 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FileServiceClient).get_transport_class, type(FileServiceClient) - ) + get_transport_class = FileServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py index 11b887da8a6a..87de8e15bc6d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py @@ -652,7 +652,7 @@ def __init__( transport_init: Union[ Type[FileServiceTransport], Callable[..., FileServiceTransport] ] = ( - type(self).get_transport_class(transport) + FileServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FileServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py index dc9ba6241807..e3fc8e8c632d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GenerativeServiceClient).get_transport_class, type(GenerativeServiceClient) - ) + get_transport_class = GenerativeServiceClient.get_transport_class def __init__( self, @@ -284,14 +281,15 @@ async def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -333,12 +331,14 @@ async def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. 
For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -351,18 +351,18 @@ async def sample_generate_content(): Returns: google.ai.generativelanguage_v1beta.types.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -459,8 +459,8 @@ async def sample_generate_answer(): Args: request (Optional[Union[google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest, dict]]): - The request object. Request to generate a grounded answer - from the model. + The request object. Request to generate a grounded answer from the + ``Model``. model (:class:`str`): Required. The name of the ``Model`` to use for generating the grounded response. @@ -472,13 +472,13 @@ async def sample_generate_answer(): should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): Required. The content of the current conversation with - the model. For single-turn queries, this is a single + the ``Model``. For single-turn queries, this is a single question to answer. For multi-turn queries, this is a repeated field that contains conversation history and the last ``Content`` in the list containing the question. - Note: GenerateAnswer currently only supports queries in + Note: ``GenerateAnswer`` only supports queries in English. This corresponds to the ``contents`` field @@ -502,7 +502,13 @@ async def sample_generate_answer(): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, - HARM_CATEGORY_HARASSMENT are supported. + HARM_CATEGORY_HARASSMENT are supported. Refer to the + `guide `__ + for detailed information on available safety settings. + Also refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in + your AI applications. This corresponds to the ``safety_settings`` field on the ``request`` instance; if ``request`` is provided, this @@ -590,8 +596,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[generative_service.GenerateContentResponse]]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. 
code-block:: python @@ -634,12 +641,14 @@ async def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -652,18 +661,18 @@ async def sample_stream_generate_content(): Returns: AsyncIterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -724,8 +733,9 @@ async def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. code-block:: python @@ -848,8 +858,9 @@ async def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -973,8 +984,10 @@ async def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. 
code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py index 0dc4b14b681f..64141f79649a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py @@ -679,7 +679,7 @@ def __init__( Type[GenerativeServiceTransport], Callable[..., GenerativeServiceTransport], ] = ( - type(self).get_transport_class(transport) + GenerativeServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GenerativeServiceTransport], transport) ) @@ -708,14 +708,15 @@ def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -757,12 +758,14 @@ def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -775,18 +778,18 @@ def sample_generate_content(): Returns: google.ai.generativelanguage_v1beta.types.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. 
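A short sketch of the response contract described in this docstring, using the v1beta client; the API key handling and model name below are placeholders, not values defined by this change:

.. code-block:: python

    from google.ai import generativelanguage_v1beta as glm

    client = glm.GenerativeServiceClient(client_options={"api_key": "YOUR_API_KEY"})  # placeholder

    response = client.generate_content(
        model="models/gemini-1.5-flash",  # assumed model name
        contents=[glm.Content(parts=[glm.Part(text="Explain top-k sampling briefly.")])],
    )

    if not response.candidates:
        # No candidates are returned only when the prompt itself was rejected.
        print("prompt blocked:", response.prompt_feedback.block_reason)
    else:
        for candidate in response.candidates:
            # Per-candidate feedback lives in finish_reason and safety_ratings.
            print(candidate.finish_reason, candidate.safety_ratings)
            print(candidate.content.parts[0].text)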
@@ -880,8 +883,8 @@ def sample_generate_answer(): Args: request (Union[google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest, dict]): - The request object. Request to generate a grounded answer - from the model. + The request object. Request to generate a grounded answer from the + ``Model``. model (str): Required. The name of the ``Model`` to use for generating the grounded response. @@ -893,13 +896,13 @@ def sample_generate_answer(): should not be set. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): Required. The content of the current conversation with - the model. For single-turn queries, this is a single + the ``Model``. For single-turn queries, this is a single question to answer. For multi-turn queries, this is a repeated field that contains conversation history and the last ``Content`` in the list containing the question. - Note: GenerateAnswer currently only supports queries in + Note: ``GenerateAnswer`` only supports queries in English. This corresponds to the ``contents`` field @@ -923,7 +926,13 @@ def sample_generate_answer(): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, - HARM_CATEGORY_HARASSMENT are supported. + HARM_CATEGORY_HARASSMENT are supported. Refer to the + `guide `__ + for detailed information on available safety settings. + Also refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in + your AI applications. This corresponds to the ``safety_settings`` field on the ``request`` instance; if ``request`` is provided, this @@ -1008,8 +1017,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[generative_service.GenerateContentResponse]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. code-block:: python @@ -1052,12 +1062,14 @@ def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -1070,18 +1082,18 @@ def sample_stream_generate_content(): Returns: Iterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. 
The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -1139,8 +1151,9 @@ def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. code-block:: python @@ -1260,8 +1273,9 @@ def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -1382,8 +1396,10 @@ def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py index 1a4ef014a66e..4ff9e19a87b2 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py @@ -245,14 +245,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. - Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -311,8 +312,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. 
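A hedged sketch of consuming the streamed variant described above; the placeholder key and model name are assumptions:

.. code-block:: python

    from google.ai import generativelanguage_v1beta as glm

    client = glm.GenerativeServiceClient(client_options={"api_key": "YOUR_API_KEY"})  # placeholder

    stream = client.stream_generate_content(
        model="models/gemini-1.5-flash",  # assumed model name
        contents=[glm.Content(parts=[glm.Part(text="Tell a short story.")])],
    )

    # The call yields partial GenerateContentResponse messages as they arrive.
    for chunk in stream:
        for candidate in chunk.candidates:
            for part in candidate.content.parts:
                print(part.text, end="", flush=True)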
Returns: Callable[[~.GenerateContentRequest], @@ -341,8 +343,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. Returns: Callable[[~.EmbedContentRequest], @@ -371,8 +374,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. Returns: Callable[[~.BatchEmbedContentsRequest], @@ -400,8 +404,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py index d175309053ed..67f3f3bec7de 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py @@ -249,14 +249,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. - Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -315,8 +316,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. Returns: Callable[[~.GenerateContentRequest], @@ -345,8 +347,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. Returns: Callable[[~.EmbedContentRequest], @@ -375,8 +378,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. 
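A rough sketch of the single and batch embedding calls described above; the embedding model name and key are assumptions, not values from this patch:

.. code-block:: python

    from google.ai import generativelanguage_v1beta as glm

    client = glm.GenerativeServiceClient(client_options={"api_key": "YOUR_API_KEY"})  # placeholder
    model = "models/text-embedding-004"  # assumed embedding model name

    # Single embedding: one Content in, one ContentEmbedding out.
    single = client.embed_content(
        request=glm.EmbedContentRequest(
            model=model,
            content=glm.Content(parts=[glm.Part(text="hello world")]),
        )
    )
    print(len(single.embedding.values))

    # Batch: one EmbedContentRequest per input string, all against the same model.
    batch = client.batch_embed_contents(
        request=glm.BatchEmbedContentsRequest(
            model=model,
            requests=[
                glm.EmbedContentRequest(
                    model=model,
                    content=glm.Content(parts=[glm.Part(text=text)]),
                )
                for text in ["first input", "second input"]
            ],
        )
    )
    print(len(batch.embeddings))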
Returns: Callable[[~.BatchEmbedContentsRequest], @@ -405,8 +409,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py index 28b876f57052..3bf4d1331eec 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py @@ -669,8 +669,8 @@ def __call__( Args: request (~.generative_service.GenerateAnswerRequest): - The request object. Request to generate a grounded answer - from the model. + The request object. Request to generate a grounded answer from the + ``Model``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -774,20 +774,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. """ @@ -889,20 +890,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. 
""" diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py index 2177c1e0a437..6cd12d3367ac 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, @@ -274,7 +271,12 @@ async def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. code-block:: python @@ -385,7 +387,9 @@ async def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsAsyncPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. code-block:: python @@ -419,10 +423,9 @@ async def sample_list_models(): page_size (:class:`int`): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. This method - returns at most 1000 models per page, even if you pass a - larger page_size. + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this @@ -626,7 +629,7 @@ async def list_tuned_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTunedModelsAsyncPager: - r"""Lists tuned models owned by the user. + r"""Lists created tuned models. .. code-block:: python @@ -761,12 +764,11 @@ async def create_tuned_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a tuned model. Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + r"""Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 .. code-block:: python @@ -815,7 +817,7 @@ async def sample_create_tuned_model(): specified. This value should be up to 40 characters, the first character must be a letter, the last could be a letter or a number. 
The id must match the regular - expression: `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + expression: ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. This corresponds to the ``tuned_model_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py index 013ab369acb9..c35b6b3c2168 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py @@ -673,7 +673,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) @@ -699,7 +699,12 @@ def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. code-block:: python @@ -807,7 +812,9 @@ def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. code-block:: python @@ -841,10 +848,9 @@ def sample_list_models(): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. This method - returns at most 1000 models per page, even if you pass a - larger page_size. + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this @@ -1042,7 +1048,7 @@ def list_tuned_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTunedModelsPager: - r"""Lists tuned models owned by the user. + r"""Lists created tuned models. .. code-block:: python @@ -1174,12 +1180,11 @@ def create_tuned_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates a tuned model. Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + r"""Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 .. code-block:: python @@ -1228,7 +1233,7 @@ def sample_create_tuned_model(): specified. 
This value should be up to 40 characters, the first character must be a letter, the last could be a letter or a number. The id must match the regular - expression: `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + expression: ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. This corresponds to the ``tuned_model_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py index 0649fc878074..7e61ad9e403c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py @@ -258,7 +258,12 @@ def operations_client(self) -> operations_v1.OperationsClient: def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. Returns: Callable[[~.GetModelRequest], @@ -284,7 +289,9 @@ def list_models( ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. Returns: Callable[[~.ListModelsRequest], @@ -338,7 +345,7 @@ def list_tuned_models( ]: r"""Return a callable for the list tuned models method over gRPC. - Lists tuned models owned by the user. + Lists created tuned models. Returns: Callable[[~.ListTunedModelsRequest], @@ -364,12 +371,11 @@ def create_tuned_model( ) -> Callable[[model_service.CreateTunedModelRequest], operations_pb2.Operation]: r"""Return a callable for the create tuned model method over gRPC. - Creates a tuned model. Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 Returns: Callable[[~.CreateTunedModelRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py index bfce8ab6172a..80d377114d32 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py @@ -266,7 +266,12 @@ def get_model( ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. 
+ Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. Returns: Callable[[~.GetModelRequest], @@ -294,7 +299,9 @@ def list_models( ]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. Returns: Callable[[~.ListModelsRequest], @@ -351,7 +358,7 @@ def list_tuned_models( ]: r"""Return a callable for the list tuned models method over gRPC. - Lists tuned models owned by the user. + Lists created tuned models. Returns: Callable[[~.ListTunedModelsRequest], @@ -379,12 +386,11 @@ def create_tuned_model( ]: r"""Return a callable for the create tuned model method over gRPC. - Creates a tuned model. Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 Returns: Callable[[~.CreateTunedModelRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py index a7ddc4392a3f..a44ae51107b1 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PermissionServiceClient).get_transport_class, type(PermissionServiceClient) - ) + get_transport_class = PermissionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py index e38752e95708..61db870f8cb2 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py @@ -669,7 +669,7 @@ def __init__( Type[PermissionServiceTransport], Callable[..., PermissionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PermissionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PermissionServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py index 39d67e5559b9..9f8898271113 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RetrieverServiceClient).get_transport_class, type(RetrieverServiceClient) - ) + get_transport_class = RetrieverServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py index 336cc97cc34b..2efc8f181dd1 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py @@ -701,7 +701,7 @@ def __init__( Type[RetrieverServiceTransport], Callable[..., RetrieverServiceTransport], ] = ( - type(self).get_transport_class(transport) + RetrieverServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RetrieverServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py index 672e05be2d15..adebc14f79ae 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextServiceClient).get_transport_class, type(TextServiceClient) - ) + get_transport_class = TextServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py index 6df1eedcc78d..37c0ff946c1e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py @@ -653,7 +653,7 @@ def __init__( transport_init: Union[ Type[TextServiceTransport], Callable[..., TextServiceTransport] ] = ( - type(self).get_transport_class(transport) + TextServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TextServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py index c5e42c09d5d4..bbdbf7f24bc8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -587,6 +587,7 @@ class Schema(proto.Message): for NUMBER type: float, double for INTEGER type: int32, int64 + for STRING type: enum description (str): Optional. A brief description of the parameter. This could contain examples of use. @@ -604,6 +605,9 @@ class Schema(proto.Message): Type.ARRAY. This field is a member of `oneof`_ ``_items``. + max_items (int): + Optional. Maximum number of the elements for + Type.ARRAY. properties (MutableMapping[str, google.ai.generativelanguage_v1beta.types.Schema]): Optional. Properties of Type.OBJECT. required (MutableSequence[str]): @@ -637,6 +641,10 @@ class Schema(proto.Message): optional=True, message="Schema", ) + max_items: int = proto.Field( + proto.INT64, + number=21, + ) properties: MutableMapping[str, "Schema"] = proto.MapField( proto.STRING, proto.MESSAGE, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py index a6cbe296b63c..b31b07aa4299 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -98,28 +98,38 @@ class GenerateContentRequest(proto.Message): Format: ``name=models/{model}``. system_instruction (google.ai.generativelanguage_v1beta.types.Content): - Optional. Developer set system instruction. + Optional. Developer set `system + instruction(s) `__. Currently, text only. This field is a member of `oneof`_ ``_system_instruction``. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a single - instance. 
For multi-turn queries, this is a - repeated field that contains conversation - history + latest request. + Required. The content of the current conversation with the + model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. tools (MutableSequence[google.ai.generativelanguage_v1beta.types.Tool]): - Optional. A list of ``Tools`` the model may use to generate - the next response. + Optional. A list of ``Tools`` the ``Model`` may use to + generate the next response. A ``Tool`` is a piece of code that enables the system to interact with external systems to perform an action, or set - of actions, outside of knowledge and scope of the model. The - only supported tool is currently ``Function``. + of actions, outside of knowledge and scope of the ``Model``. + Supported ``Tool``\ s are ``Function`` and + ``code_execution``. Refer to the `Function + calling `__ + and the `Code + execution `__ + guides to learn more. tool_config (google.ai.generativelanguage_v1beta.types.ToolConfig): Optional. Tool configuration for any ``Tool`` specified in - the request. + the request. Refer to the `Function calling + guide `__ + for a usage example. safety_settings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]): Optional. A list of unique ``SafetySetting`` instances for blocking unsafe content. @@ -137,17 +147,22 @@ class GenerateContentRequest(proto.Message): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. + are supported. Refer to the + `guide `__ + for detailed information on available safety settings. Also + refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in your AI + applications. generation_config (google.ai.generativelanguage_v1beta.types.GenerationConfig): Optional. Configuration options for model generation and outputs. This field is a member of `oneof`_ ``_generation_config``. cached_content (str): - Optional. The name of the cached content used as context to - serve the prediction. Note: only used in explicit caching, - where users can have control over caching (e.g. what content - to cache) and enjoy guaranteed cost savings. Format: + Optional. The name of the content + `cached `__ + to use as context to serve the prediction. Format: ``cachedContents/{cachedContent}`` This field is a member of `oneof`_ ``_cached_content``. @@ -198,7 +213,7 @@ class GenerateContentRequest(proto.Message): class GenerationConfig(proto.Message): r"""Configuration options for model generation and outputs. Not - all parameters may be configurable for every model. + all parameters are configurable for every model. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -212,14 +227,13 @@ class GenerationConfig(proto.Message): This field is a member of `oneof`_ ``_candidate_count``. stop_sequences (MutableSequence[str]): - Optional. The set of character sequences (up - to 5) that will stop output generation. If - specified, the API will stop at the first - appearance of a stop sequence. The stop sequence - will not be included as part of the response. + Optional. The set of character sequences (up to 5) that will + stop output generation. If specified, the API will stop at + the first appearance of a ``stop_sequence``. 
The stop + sequence will not be included as part of the response. max_output_tokens (int): Optional. The maximum number of tokens to include in a - candidate. + response candidate. Note: The default value varies by model, see the ``Model.output_token_limit`` attribute of the ``Model`` @@ -240,49 +254,55 @@ class GenerationConfig(proto.Message): Optional. The maximum cumulative probability of tokens to consider when sampling. - The model uses combined Top-k and nucleus sampling. + The model uses combined Top-k and Top-p (nucleus) sampling. Tokens are sorted based on their assigned probabilities so that only the most likely tokens are considered. Top-k sampling directly limits the maximum number of tokens to - consider, while Nucleus sampling limits number of tokens + consider, while Nucleus sampling limits the number of tokens based on the cumulative probability. - Note: The default value varies by model, see the - ``Model.top_p`` attribute of the ``Model`` returned from the - ``getModel`` function. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_p``. top_k (int): Optional. The maximum number of tokens to consider when sampling. - Models use nucleus sampling or combined Top-k and nucleus - sampling. Top-k sampling considers the set of ``top_k`` most - probable tokens. Models running with nucleus sampling don't - allow top_k setting. + Gemini models use Top-p (nucleus) sampling or a combination + of Top-k and nucleus sampling. Top-k sampling considers the + set of ``top_k`` most probable tokens. Models running with + nucleus sampling don't allow top_k setting. - Note: The default value varies by model, see the - ``Model.top_k`` attribute of the ``Model`` returned from the - ``getModel`` function. Empty ``top_k`` field in ``Model`` - indicates the model doesn't apply top-k sampling and doesn't - allow setting ``top_k`` on requests. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_k``. response_mime_type (str): - Optional. Output response mimetype of the generated - candidate text. Supported mimetype: ``text/plain``: - (default) Text output. ``application/json``: JSON response - in the candidates. + Optional. MIME type of the generated candidate text. + Supported MIME types are: ``text/plain``: (default) Text + output. ``application/json``: JSON response in the response + candidates. Refer to the + `docs `__ + for a list of all supported text MIME types. response_schema (google.ai.generativelanguage_v1beta.types.Schema): - Optional. Output response schema of the generated candidate - text when response mime type can have schema. Schema can be - objects, primitives or arrays and is a subset of `OpenAPI - schema `__. - - If set, a compatible response_mime_type must also be set. - Compatible mimetypes: ``application/json``: Schema for JSON - response. + Optional. Output schema of the generated candidate text. + Schemas must be a subset of the `OpenAPI + schema `__ and + can be objects, primitives or arrays. 
+ + If set, a compatible ``response_mime_type`` must also be + set. Compatible MIME types: ``application/json``: Schema for + JSON response. Refer to the `JSON text generation + guide `__ + for more details. """ candidate_count: int = proto.Field( @@ -334,11 +354,11 @@ class SemanticRetrieverConfig(proto.Message): Attributes: source (str): - Required. Name of the resource for retrieval, - e.g. corpora/123 or corpora/123/documents/abc. + Required. Name of the resource for retrieval. Example: + ``corpora/123`` or ``corpora/123/documents/abc``. query (google.ai.generativelanguage_v1beta.types.Content): - Required. Query to use for similarity matching ``Chunk``\ s - in the given resource. + Required. Query to use for matching ``Chunk``\ s in the + given resource by similarity. metadata_filters (MutableSequence[google.ai.generativelanguage_v1beta.types.MetadataFilter]): Optional. Filters for selecting ``Document``\ s and/or ``Chunk``\ s from the resource. @@ -381,18 +401,16 @@ class SemanticRetrieverConfig(proto.Message): class GenerateContentResponse(proto.Message): - r"""Response from the model supporting multiple candidates. - - Note on safety ratings and content filtering. They are reported for - both prompt in ``GenerateContentResponse.prompt_feedback`` and for - each candidate in ``finish_reason`` and in ``safety_ratings``. The - API contract is that: - - - either all requested candidates are returned or no candidates at - all - - no candidates are returned only if there was something wrong with - the prompt (see ``prompt_feedback``) - - feedback on each candidate is reported on ``finish_reason`` and + r"""Response from the model supporting multiple candidate responses. + + Safety ratings and content filtering are reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each candidate + in ``finish_reason`` and in ``safety_ratings``. The API: + + - Returns either all requested candidates or none of them + - Returns no candidates at all only if there was something wrong + with the prompt (check ``prompt_feedback``) + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. Attributes: @@ -413,29 +431,35 @@ class PromptFeedback(proto.Message): Attributes: block_reason (google.ai.generativelanguage_v1beta.types.GenerateContentResponse.PromptFeedback.BlockReason): Optional. If set, the prompt was blocked and - no candidates are returned. Rephrase your - prompt. + no candidates are returned. Rephrase the prompt. safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): Ratings for safety of the prompt. There is at most one rating per category. """ class BlockReason(proto.Enum): - r"""Specifies what was the reason why prompt was blocked. + r"""Specifies the reason why the prompt was blocked. Values: BLOCK_REASON_UNSPECIFIED (0): Default value. This value is unused. SAFETY (1): - Prompt was blocked due to safety reasons. You can inspect + Prompt was blocked due to safety reasons. Inspect ``safety_ratings`` to understand which safety category blocked it. OTHER (2): Prompt was blocked due to unknown reasons. + BLOCKLIST (3): + Prompt was blocked due to the terms which are + included from the terminology blocklist. + PROHIBITED_CONTENT (4): + Prompt was blocked due to prohibited content. 
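A hedged usage sketch tying together the response_mime_type / response_schema notes above and the new Schema.max_items field from content.py. It assumes a build of google-ai-generativelanguage that already contains these changes, ambient credentials (ADC or an API key via client_options), and an illustrative model name:

from google.ai import generativelanguage_v1beta as glm

# Constrain the output to a JSON array of at most five strings. The generated
# proto-plus class exposes the proto's "type" field as "type_".
config = glm.GenerationConfig(
    response_mime_type="application/json",
    response_schema=glm.Schema(
        type_=glm.Type.ARRAY,
        items=glm.Schema(type_=glm.Type.STRING),
        max_items=5,  # field number 21, added in this revision of content.py
    ),
)

client = glm.GenerativeServiceClient()  # assumes ambient credentials
response = client.generate_content(
    request=glm.GenerateContentRequest(
        model="models/gemini-1.5-flash",  # illustrative model name
        contents=[glm.Content(role="user", parts=[glm.Part(text="List five colors.")])],
        generation_config=config,
    )
)
print(response.candidates[0].content.parts[0].text)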
""" BLOCK_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 + BLOCKLIST = 3 + PROHIBITED_CONTENT = 4 block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( proto.Field( @@ -455,18 +479,18 @@ class UsageMetadata(proto.Message): Attributes: prompt_token_count (int): - Number of tokens in the prompt. When cached_content is set, - this is still the total effective prompt size. I.e. this - includes the number of tokens in the cached content. + Number of tokens in the prompt. When ``cached_content`` is + set, this is still the total effective prompt size meaning + this includes the number of tokens in the cached content. cached_content_token_count (int): Number of tokens in the cached part of the - prompt, i.e. in the cached content. + prompt (the cached content) candidates_token_count (int): - Total number of tokens across the generated - candidates. + Total number of tokens across all the + generated response candidates. total_token_count (int): Total token count for the generation request - (prompt + candidates). + (prompt + response candidates). """ prompt_token_count: int = proto.Field( @@ -511,7 +535,7 @@ class Candidate(proto.Message): Attributes: index (int): Output only. Index of the candidate in the - list of candidates. + list of response candidates. This field is a member of `oneof`_ ``_index``. content (google.ai.generativelanguage_v1beta.types.Content): @@ -521,7 +545,7 @@ class Candidate(proto.Message): Optional. Output only. The reason why the model stopped generating tokens. If empty, the model has not stopped generating - the tokens. + tokens. safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): List of ratings for the safety of a response candidate. @@ -556,20 +580,41 @@ class FinishReason(proto.Enum): The maximum number of tokens as specified in the request was reached. SAFETY (3): - The candidate content was flagged for safety - reasons. + The response candidate content was flagged + for safety reasons. RECITATION (4): - The candidate content was flagged for - recitation reasons. + The response candidate content was flagged + for recitation reasons. + LANGUAGE (6): + The response candidate content was flagged + for using an unsupported language. OTHER (5): Unknown reason. + BLOCKLIST (7): + Token generation stopped because the content + contains forbidden terms. + PROHIBITED_CONTENT (8): + Token generation stopped for potentially + containing prohibited content. + SPII (9): + Token generation stopped because the content + potentially contains Sensitive Personally + Identifiable Information (SPII). + MALFORMED_FUNCTION_CALL (10): + The function call generated by the model is + invalid. """ FINISH_REASON_UNSPECIFIED = 0 STOP = 1 MAX_TOKENS = 2 SAFETY = 3 RECITATION = 4 + LANGUAGE = 6 OTHER = 5 + BLOCKLIST = 7 + PROHIBITED_CONTENT = 8 + SPII = 9 + MALFORMED_FUNCTION_CALL = 10 index: int = proto.Field( proto.INT32, @@ -714,7 +759,7 @@ class GroundingAttribution(proto.Message): class GenerateAnswerRequest(proto.Message): - r"""Request to generate a grounded answer from the model. + r"""Request to generate a grounded answer from the ``Model``. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -740,13 +785,12 @@ class GenerateAnswerRequest(proto.Message): Format: ``model=models/{model}``. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): Required. The content of the current conversation with the - model. 
For single-turn queries, this is a single question to - answer. For multi-turn queries, this is a repeated field - that contains conversation history and the last ``Content`` - in the list containing the question. + ``Model``. For single-turn queries, this is a single + question to answer. For multi-turn queries, this is a + repeated field that contains conversation history and the + last ``Content`` in the list containing the question. - Note: GenerateAnswer currently only supports queries in - English. + Note: ``GenerateAnswer`` only supports queries in English. answer_style (google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle): Required. Style in which answers should be returned. @@ -767,7 +811,13 @@ class GenerateAnswerRequest(proto.Message): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. + are supported. Refer to the + `guide `__ + for detailed information on available safety settings. Also + refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in your AI + applications. temperature (float): Optional. Controls the randomness of the output. @@ -858,26 +908,25 @@ class GenerateAnswerResponse(proto.Message): Output only. The model's estimate of the probability that its answer is correct and grounded in the input passages. - A low answerable_probability indicates that the answer might - not be grounded in the sources. + A low ``answerable_probability`` indicates that the answer + might not be grounded in the sources. - When ``answerable_probability`` is low, some clients may - wish to: + When ``answerable_probability`` is low, you may want to: - Display a message to the effect of "We couldn’t answer that question" to the user. - Fall back to a general-purpose LLM that answers the question from world knowledge. The threshold and nature - of such fallbacks will depend on individual clients’ use - cases. 0.5 is a good starting threshold. + of such fallbacks will depend on individual use cases. + ``0.5`` is a good starting threshold. This field is a member of `oneof`_ ``_answerable_probability``. input_feedback (google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse.InputFeedback): Output only. Feedback related to the input data used to - answer the question, as opposed to model-generated response - to the question. + answer the question, as opposed to the model-generated + response to the question. - "Input data" can be one or more of the following: + The input data can be one or more of the following: - Question specified by the last entry in ``GenerateAnswerRequest.content`` @@ -892,7 +941,7 @@ class GenerateAnswerResponse(proto.Message): class InputFeedback(proto.Message): r"""Feedback related to the input data used to answer the - question, as opposed to model-generated response to the + question, as opposed to the model-generated response to the question. @@ -901,7 +950,7 @@ class InputFeedback(proto.Message): Attributes: block_reason (google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse.InputFeedback.BlockReason): Optional. If set, the input was blocked and - no candidates are returned. Rephrase your input. + no candidates are returned. Rephrase the input. This field is a member of `oneof`_ ``_block_reason``. safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): @@ -916,7 +965,7 @@ class BlockReason(proto.Enum): BLOCK_REASON_UNSPECIFIED (0): Default value. 
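The answerable_probability guidance above suggests 0.5 as a starting threshold before falling back. A hedged end-to-end sketch of that flow; the model name, passage text, and threshold are illustrative, and ambient credentials are assumed:

from google.ai import generativelanguage_v1beta as glm

client = glm.GenerativeServiceClient()  # assumes ambient credentials
response = client.generate_answer(
    request=glm.GenerateAnswerRequest(
        model="models/aqa",  # illustrative attributed-answer model name
        contents=[
            glm.Content(role="user", parts=[glm.Part(text="What color is the sky?")])
        ],
        answer_style=glm.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE,
        inline_passages=glm.GroundingPassages(
            passages=[
                glm.GroundingPassage(
                    id="passage-1",
                    content=glm.Content(parts=[glm.Part(text="The daytime sky appears blue.")]),
                )
            ]
        ),
    )
)

if response.answerable_probability < 0.5:  # starting threshold suggested above
    print("Low grounding confidence; fall back to a general-purpose model.")
else:
    print(response.answer.content.parts[0].text)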
This value is unused. SAFETY (1): - Input was blocked due to safety reasons. You can inspect + Input was blocked due to safety reasons. Inspect ``safety_ratings`` to understand which safety category blocked it. OTHER (2): @@ -990,8 +1039,8 @@ class EmbedContentRequest(proto.Message): Optional. Optional reduced dimension for the output embedding. If set, excessive values in the output embedding are truncated from the end. Supported by newer models since - 2024, and the earlier model (``models/embedding-001``) - cannot specify this value. + 2024 only. You cannot set this value if using the earlier + model (``models/embedding-001``). This field is a member of `oneof`_ ``_output_dimensionality``. """ @@ -1119,9 +1168,16 @@ class CountTokensRequest(proto.Message): Optional. The input given to the model as a prompt. This field is ignored when ``generate_content_request`` is set. generate_content_request (google.ai.generativelanguage_v1beta.types.GenerateContentRequest): - Optional. The overall input given to the - model. CountTokens will count prompt, function - calling, etc. + Optional. The overall input given to the ``Model``. This + includes the prompt as well as other model steering + information like `system + instructions `__, + and/or function declarations for `function + calling `__. + ``Model``\ s/\ ``Content``\ s and + ``generate_content_request``\ s are mutually exclusive. You + can either send ``Model`` + ``Content``\ s or a + ``generate_content_request``, but never both. """ model: str = proto.Field( @@ -1147,12 +1203,8 @@ class CountTokensResponse(proto.Message): Attributes: total_tokens (int): - The number of tokens that the ``model`` tokenizes the - ``prompt`` into. - - Always non-negative. When cached_content is set, this is - still the total effective prompt size. I.e. this includes - the number of tokens in the cached content. + The number of tokens that the ``Model`` tokenizes the + ``prompt`` into. Always non-negative. cached_content_token_count (int): Number of tokens in the cached part of the prompt, i.e. in the cached content. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py index 82dbdd515047..13c91f98f341 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py @@ -34,7 +34,10 @@ class Model(proto.Message): Attributes: name (str): - Required. The resource name of the ``Model``. + Required. The resource name of the ``Model``. Refer to + `Model + variants `__ + for all allowed values. Format: ``models/{model}`` with a ``{model}`` naming convention of: @@ -43,21 +46,21 @@ class Model(proto.Message): Examples: - - ``models/chat-bison-001`` + - ``models/gemini-1.5-flash-001`` base_model_id (str): Required. The name of the base model, pass this to the generation request. Examples: - - ``chat-bison`` + - ``gemini-1.5-flash`` version (str): Required. The version number of the model. - This represents the major version + This represents the major version (``1.0`` or ``1.5``) display_name (str): The human-readable name of the model. E.g. - "Chat Bison". + "Gemini 1.5 Flash". The name can be up to 128 characters long and can consist of any UTF-8 characters. 
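A hedged sketch that reads back the Model fields documented in this hunk (including max_temperature, which the updated tests below also exercise); the model name is illustrative and credentials are assumed:

from google.ai import generativelanguage_v1beta as glm

client = glm.ModelServiceClient()  # assumes ambient credentials
model = client.get_model(name="models/gemini-1.5-flash")  # illustrative model name

print(model.display_name, model.version, model.base_model_id)
print(
    "temperature:", model.temperature,
    "max_temperature:", model.max_temperature,
    "top_p:", model.top_p,
    "top_k:", model.top_k,
)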
description (str): @@ -71,8 +74,9 @@ class Model(proto.Message): supported_generation_methods (MutableSequence[str]): The model's supported generation methods. - The method names are defined as Pascal case strings, such as - ``generateMessage`` which correspond to API methods. + The corresponding API method names are defined as Pascal + case strings, such as ``generateMessage`` and + ``generateContent``. temperature (float): Controls the randomness of the output. @@ -89,7 +93,8 @@ class Model(proto.Message): This field is a member of `oneof`_ ``_max_temperature``. top_p (float): - For Nucleus sampling. + For `Nucleus + sampling `__. Nucleus sampling considers the smallest set of tokens whose probability sum is at least ``top_p``. This value specifies diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py index a49938f4feb4..934be96ddd56 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py @@ -66,10 +66,9 @@ class ListModelsRequest(proto.Message): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at most - 50 models will be returned per page. This method returns at - most 1000 models per page, even if you pass a larger - page_size. + If unspecified, 50 models will be returned per page. This + method returns at most 1000 models per page, even if you + pass a larger page_size. page_token (str): A page token, received from a previous ``ListModels`` call. @@ -232,7 +231,7 @@ class CreateTunedModelRequest(proto.Message): This value should be up to 40 characters, the first character must be a letter, the last could be a letter or a number. The id must match the regular expression: - `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. This field is a member of `oneof`_ ``_tuned_model_id``. tuned_model (google.ai.generativelanguage_v1beta.types.TunedModel): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py index 97d190d45489..a45283f33632 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py @@ -54,7 +54,7 @@ class TunedModel(proto.Message): This field is a member of `oneof`_ ``source_model``. base_model (str): Immutable. The name of the ``Model`` to tune. Example: - ``models/text-bison-001`` + ``models/gemini-1.5-flash-001`` This field is a member of `oneof`_ ``source_model``. name (str): @@ -63,8 +63,11 @@ class TunedModel(proto.Message): display_name is set on create, the id portion of the name will be set by concatenating the words of the display_name with hyphens and adding a random portion for uniqueness. - Example: display_name = "Sentence Translator" name = - "tunedModels/sentence-translator-u3b7m". + + Example: + + - display_name = ``Sentence Translator`` + - name = ``tunedModels/sentence-translator-u3b7m`` display_name (str): Optional. The name to display for this model in user interfaces. 
The display name must be up @@ -206,7 +209,7 @@ class TunedModelSource(proto.Message): base_model (str): Output only. The name of the base ``Model`` this ``TunedModel`` was tuned from. Example: - ``models/text-bison-001`` + ``models/gemini-1.5-flash-001`` """ tuned_model: str = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py index 7e192545aa93..6413d76f3cbd 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) - ) + get_transport_class = DiscussServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py index 9c93c822fda1..18bbc6108ddd 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[DiscussServiceTransport], Callable[..., DiscussServiceTransport] ] = ( - type(self).get_transport_class(transport) + DiscussServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiscussServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py index cd7ca4b19b5c..53f2c11e8a5c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py index 45998abaf48f..b4c965681988 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py @@ -650,7 +650,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py index 2d648ebfd134..14b3d745095b 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextServiceClient).get_transport_class, type(TextServiceClient) - ) + get_transport_class = TextServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py index d7f49966172f..74a4d9424c53 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py @@ -651,7 +651,7 @@ def __init__( transport_init: Union[ Type[TextServiceTransport], Callable[..., TextServiceTransport] ] = ( - type(self).get_transport_class(transport) + TextServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TextServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py index 12f8cd1d7215..0683a984cae1 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) - ) + get_transport_class = DiscussServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py index 15ec3f87e353..3f96ccafb73a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py @@ -658,7 +658,7 @@ def __init__( transport_init: Union[ Type[DiscussServiceTransport], Callable[..., DiscussServiceTransport] ] = ( - type(self).get_transport_class(transport) + DiscussServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiscussServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py index 991704110b98..c51657477468 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py index 96f90e217bc6..f76ac868c667 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py @@ -673,7 +673,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py index c7829c1a55d8..bc1bb058d9ff 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PermissionServiceClient).get_transport_class, type(PermissionServiceClient) - ) + get_transport_class = PermissionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py index aef954c40823..fbbd86c3e7d5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py @@ -684,7 +684,7 @@ def __init__( Type[PermissionServiceTransport], Callable[..., PermissionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PermissionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PermissionServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py index 41e23b173930..a96955194c9d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py @@ -395,11 +395,11 @@ def __call__( role is a superset of the previous role's permitted operations: - - reader can use the resource (e.g. + - reader can use the resource (e.g. tuned model) for inference - - writer has reader's permissions and + - writer has reader's permissions and additionally can edit and share - - owner has writer's permissions and + - owner has writer's permissions and additionally can delete """ @@ -588,11 +588,11 @@ def __call__( role is a superset of the previous role's permitted operations: - - reader can use the resource (e.g. + - reader can use the resource (e.g. tuned model) for inference - - writer has reader's permissions and + - writer has reader's permissions and additionally can edit and share - - owner has writer's permissions and + - owner has writer's permissions and additionally can delete """ @@ -882,11 +882,11 @@ def __call__( role is a superset of the previous role's permitted operations: - - reader can use the resource (e.g. + - reader can use the resource (e.g. 
tuned model) for inference - - writer has reader's permissions and + - writer has reader's permissions and additionally can edit and share - - owner has writer's permissions and + - owner has writer's permissions and additionally can delete """ diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py index 8c2138d0991e..69c1df66a614 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextServiceClient).get_transport_class, type(TextServiceClient) - ) + get_transport_class = TextServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py index a13903fde0a4..ad257c9d2909 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py @@ -653,7 +653,7 @@ def __init__( transport_init: Union[ Type[TextServiceTransport], Callable[..., TextServiceTransport] ] = ( - type(self).get_transport_class(transport) + TextServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TextServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index c1b9855b9786..d6c3fe4c5051 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index 24523bd58674..3a0ef31881fb 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json 
b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index d7f8a8be4c13..5b7d0a0509b4 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 3d177a97e009..91de9e353f90 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py index 1fcb053b7910..24d7d6fbcddc 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py @@ -1318,22 +1318,23 @@ async def test_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_content - ] = mock_object + ] = mock_rpc request = {} await client.generate_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1700,22 +1701,23 @@ async def test_stream_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stream_generate_content - ] = mock_object + ] = mock_rpc request = {} await client.stream_generate_content(request) # Establish that the underlying gRPC stub method was called. 
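The test updates above rename the cached-RPC stand-in to mock_rpc and give it an explicit plain-Mock return value. A standalone, hedged reduction of that pattern using only the standard library (names are illustrative, not the generated test code):

import asyncio
from unittest import mock


async def demo() -> None:
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaited calls resolve to a plain Mock

    # Stand-in for client._client._transport._wrapped_methods in the real tests.
    wrapped_methods = {"generate_content": mock_rpc}

    request = {}
    await wrapped_methods["generate_content"](request)
    await wrapped_methods["generate_content"](request)

    # The cached wrapper is reused, so only the call count grows.
    assert mock_rpc.call_count == 2


asyncio.run(demo())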
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stream_generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2080,22 +2082,23 @@ async def test_embed_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_content - ] = mock_object + ] = mock_rpc request = {} await client.embed_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2458,22 +2461,23 @@ async def test_batch_embed_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_contents - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2840,22 +2844,23 @@ async def test_count_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_tokens(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py index fe9436e4d83c..c5ec01d8390e 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py @@ -1125,6 +1125,7 @@ def test_get_model(request_type, transport: str = "grpc"): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1149,6 +1150,7 @@ def test_get_model(request_type, transport: str = "grpc"): "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 @@ -1258,6 +1260,7 @@ async def test_get_model_empty_call_async(): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1289,22 +1292,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1334,6 +1338,7 @@ async def test_get_model_async( output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1359,6 +1364,7 @@ async def test_get_model_async( "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 @@ -1671,22 +1677,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2041,6 +2048,7 @@ def test_get_model_rest(request_type): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -2069,6 +2077,7 @@ def test_get_model_rest(request_type): "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py index becb6c3615c0..46ecbbbaeafa 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -1280,22 +1280,23 @@ async def test_list_cached_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_cached_contents - ] = mock_object + ] = mock_rpc request = {} await client.list_cached_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_cached_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1718,22 +1719,23 @@ async def test_create_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.create_cached_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2062,22 +2064,23 @@ async def test_get_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.get_cached_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2455,22 +2458,23 @@ async def test_update_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.update_cached_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2862,22 +2866,23 @@ async def test_delete_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.delete_cached_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3343,6 +3348,7 @@ def test_create_cached_content_rest(request_type): "nullable": True, "enum": ["enum_value1", "enum_value2"], "items": {}, + "max_items": 967, "properties": {}, "required": ["required_value1", "required_value2"], }, @@ -4090,6 +4096,7 @@ def test_update_cached_content_rest(request_type): "nullable": True, "enum": ["enum_value1", "enum_value2"], "items": {}, + "max_items": 967, "properties": {}, "required": ["required_value1", "required_value2"], }, diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py index 2f193871150e..90e1265131cc 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py @@ -1278,22 +1278,23 @@ async def test_generate_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_message - ] = mock_object + ] = mock_rpc request = {} await client.generate_message(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1693,22 +1694,23 @@ async def test_count_message_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_message_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_message_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_message_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py index 9b544412dcbe..d7a595504369 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py @@ -1240,22 +1240,23 @@ async def test_create_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_file - ] = mock_object + ] = mock_rpc request = {} await client.create_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1456,22 +1457,23 @@ async def test_list_files_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_files - ] = mock_object + ] = mock_rpc request = {} await client.list_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1884,22 +1886,23 @@ async def test_get_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_file - ] = mock_object + ] = mock_rpc request = {} await client.get_file(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2249,22 +2252,23 @@ async def test_delete_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_file - ] = mock_object + ] = mock_rpc request = {} await client.delete_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py index 61b7792b3d1e..b73be3c92750 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py @@ -1325,22 +1325,23 @@ async def test_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_content - ] = mock_object + ] = mock_rpc request = {} await client.generate_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1697,22 +1698,23 @@ async def test_generate_answer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_answer - ] = mock_object + ] = mock_rpc request = {} await client.generate_answer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_answer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2124,22 +2126,23 @@ async def test_stream_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stream_generate_content - ] = mock_object + ] = mock_rpc request = {} await client.stream_generate_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stream_generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2504,22 +2507,23 @@ async def test_embed_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_content - ] = mock_object + ] = mock_rpc request = {} await client.embed_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2882,22 +2886,23 @@ async def test_batch_embed_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_contents - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3267,22 +3272,23 @@ async def test_count_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index 7cd36f346f06..c8b4aed2becb 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -1305,22 +1305,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1689,22 +1690,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2212,22 +2214,23 @@ async def test_get_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.get_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2600,22 +2603,23 @@ async def test_list_tuned_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tuned_models - ] = mock_object + ] = mock_rpc request = {} await client.list_tuned_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tuned_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3072,8 +3076,9 @@ def test_create_tuned_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tuned_model(request) @@ -3129,26 +3134,28 @@ async def test_create_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.create_tuned_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3500,22 +3507,23 @@ async def test_update_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.update_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3925,22 +3933,23 @@ async def test_delete_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py index d6e039a3fdef..4a5b16af4ee0 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py @@ -1342,22 +1342,23 @@ async def test_create_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_permission - ] = mock_object + ] = mock_rpc request = {} await client.create_permission(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1742,22 +1743,23 @@ async def test_get_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_permission - ] = mock_object + ] = mock_rpc request = {} await client.get_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2117,22 +2119,23 @@ async def test_list_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_permissions - ] = mock_object + ] = mock_rpc request = {} await client.list_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2691,22 +2694,23 @@ async def test_update_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_permission - ] = mock_object + ] = mock_rpc request = {} await client.update_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3085,22 +3089,23 @@ async def test_delete_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_permission - ] = mock_object + ] = mock_rpc request = {} await client.delete_permission(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3460,22 +3465,23 @@ async def test_transfer_ownership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_ownership - ] = mock_object + ] = mock_rpc request = {} await client.transfer_ownership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.transfer_ownership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py index 4ad80ff8f8b0..d04ac7285fb9 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py @@ -1313,22 +1313,23 @@ async def test_create_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_corpus - ] = mock_object + ] = mock_rpc request = {} await client.create_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1617,22 +1618,23 @@ async def test_get_corpus_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_corpus - ] = mock_object + ] = mock_rpc request = {} await client.get_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1978,22 +1980,23 @@ async def test_update_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_corpus - ] = mock_object + ] = mock_rpc request = {} await client.update_corpus(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,22 +2346,23 @@ async def test_delete_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_corpus - ] = mock_object + ] = mock_rpc request = {} await client.delete_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2698,22 +2702,23 @@ async def test_list_corpora_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_corpora - ] = mock_object + ] = mock_rpc request = {} await client.list_corpora(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_corpora(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3107,22 +3112,23 @@ async def test_query_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_corpus - ] = mock_object + ] = mock_rpc request = {} await client.query_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3389,22 +3395,23 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3765,22 +3772,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4126,22 +4134,23 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4492,22 +4501,23 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4850,22 +4860,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5405,22 +5416,23 @@ async def test_query_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_document - ] = mock_object + ] = mock_rpc request = {} await client.query_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5687,22 +5699,23 @@ async def test_create_chunk_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_chunk - ] = mock_object + ] = mock_rpc request = {} await client.create_chunk(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6066,22 +6079,23 @@ async def test_batch_create_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_chunks - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_chunks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6353,22 +6367,23 @@ async def test_get_chunk_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_chunk - ] = mock_object + ] = mock_rpc request = {} await client.get_chunk(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6714,22 +6729,23 @@ async def test_update_chunk_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_chunk - ] = mock_object + ] = mock_rpc request = {} await client.update_chunk(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7093,22 +7109,23 @@ async def test_batch_update_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_chunks - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_chunks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7372,22 +7389,23 @@ async def test_delete_chunk_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_chunk - ] = mock_object + ] = mock_rpc request = {} await client.delete_chunk(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7732,22 +7750,23 @@ async def test_batch_delete_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_chunks - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_chunks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8016,22 +8035,23 @@ async def test_list_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_chunks - ] = mock_object + ] = mock_rpc request = {} await client.list_chunks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py index bdae2d9cd5ca..0ef83108f220 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py @@ -1239,22 +1239,23 @@ async def test_generate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_text - ] = mock_object + ] = mock_rpc request = {} await client.generate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1647,22 +1648,23 @@ async def test_embed_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_text - ] = mock_object + ] = mock_rpc request = {} await client.embed_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2015,22 +2017,23 @@ async def test_batch_embed_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_text - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2396,22 +2399,23 @@ async def test_count_text_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_text_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_text_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_text_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py index 37e8bfb66a96..aaca5a1e5113 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py @@ -1277,22 +1277,23 @@ async def test_generate_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_message - ] = mock_object + ] = mock_rpc request = {} await client.generate_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1692,22 +1693,23 @@ async def test_count_message_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_message_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_message_tokens(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_message_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py index 45fec79440be..bb7f4de6054b 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py @@ -1288,22 +1288,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1670,22 +1671,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py index b52f00220a90..8780254ecbaa 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py @@ -1238,22 +1238,23 @@ async def test_generate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_text - ] = mock_object + ] = mock_rpc request = {} await client.generate_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1646,22 +1647,23 @@ async def test_embed_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_text - ] = mock_object + ] = mock_rpc request = {} await client.embed_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py index 963596a4ca0f..6b698304d1c0 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py @@ -1278,22 +1278,23 @@ async def test_generate_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_message - ] = mock_object + ] = mock_rpc request = {} await client.generate_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1693,22 +1694,23 @@ async def test_count_message_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_message_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_message_tokens(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_message_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py index 502ea6d85d66..65e877a7a177 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py @@ -1302,22 +1302,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1684,22 +1685,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2207,22 +2209,23 @@ async def test_get_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.get_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2593,22 +2596,23 @@ async def test_list_tuned_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tuned_models - ] = mock_object + ] = mock_rpc request = {} await client.list_tuned_models(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tuned_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3069,9 @@ def test_create_tuned_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tuned_model(request) @@ -3122,26 +3127,28 @@ async def test_create_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.create_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3493,22 +3500,23 @@ async def test_update_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.update_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3918,22 +3926,23 @@ async def test_delete_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_tuned_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py index 92213ee3a04e..faeeaf23de2c 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py @@ -1342,22 +1342,23 @@ async def test_create_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_permission - ] = mock_object + ] = mock_rpc request = {} await client.create_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1742,22 +1743,23 @@ async def test_get_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_permission - ] = mock_object + ] = mock_rpc request = {} await client.get_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2117,22 +2119,23 @@ async def test_list_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_permissions - ] = mock_object + ] = mock_rpc request = {} await client.list_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2691,22 +2694,23 @@ async def test_update_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_permission - ] = mock_object + ] = mock_rpc request = {} await client.update_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3085,22 +3089,23 @@ async def test_delete_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_permission - ] = mock_object + ] = mock_rpc request = {} await client.delete_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3460,22 +3465,23 @@ async def test_transfer_ownership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_ownership - ] = mock_object + ] = mock_rpc request = {} await client.transfer_ownership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.transfer_ownership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py index cbd2b4c65ce6..30b8ee8bd044 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py @@ -1239,22 +1239,23 @@ async def test_generate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_text - ] = mock_object + ] = mock_rpc request = {} await client.generate_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1647,22 +1648,23 @@ async def test_embed_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_text - ] = mock_object + ] = mock_rpc request = {} await client.embed_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2015,22 +2017,23 @@ async def test_batch_embed_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_text - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2396,22 +2399,23 @@ async def test_count_text_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_text_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_text_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_text_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 973e48afb87cef6565535a7262e38195245018ef Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:07:41 -0400 Subject: [PATCH 009/108] feat: [google-cloud-texttospeech] A new method `StreamingSynthesize` is added to service `TextToSpeech` (#13012) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE feat: A new method `StreamingSynthesize` is added to service `TextToSpeech` docs: A comment for field `name` in message `.google.cloud.texttospeech.v1beta1.VoiceSelectionParams` is changed docs: A comment for field `name` in message `.google.cloud.texttospeech.v1.VoiceSelectionParams` is changed END_COMMIT_OVERRIDE PiperOrigin-RevId: 662978887 Source-Link: https://github.com/googleapis/googleapis/commit/195c051374369ac270384e622f05b2b961dcacd5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1a53efd4034f12bd453c36f1f3b9dbaf01695cf5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRleHR0b3NwZWVjaC8uT3dsQm90LnlhbWwiLCJoIjoiMWE1M2VmZDQwMzRmMTJiZDQ1M2MzNmYxZjNiOWRiYWYwMTY5NWNmNSJ9 feat: A new method `StreamingSynthesize` is added to service `TextToSpeech` docs: A comment for field `name` in message `.google.cloud.texttospeech.v1.VoiceSelectionParams` is changed PiperOrigin-RevId: 662708702 Source-Link: https://github.com/googleapis/googleapis/commit/569fc73ce3ddd18eec7884d4e2dfaa2e27a49017 Source-Link: https://github.com/googleapis/googleapis-gen/commit/f34f8a1d90646d3197a3d0308d8e145c70f77f41 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRleHR0b3NwZWVjaC8uT3dsQm90LnlhbWwiLCJoIjoiZjM0ZjhhMWQ5MDY0NmQzMTk3YTNkMDMwOGQ4ZTE0NWM3MGY3N2Y0MSJ9 --------- Co-authored-by: Owl Bot --- .../google/cloud/texttospeech/__init__.py | 8 + .../cloud/texttospeech/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1/__init__.py | 8 + .../cloud/texttospeech_v1/gapic_metadata.json | 15 ++ .../cloud/texttospeech_v1/gapic_version.py | 2 +- .../services/text_to_speech/async_client.py | 102 ++++++++- .../services/text_to_speech/client.py | 96 ++++++++- .../text_to_speech/transports/base.py | 17 ++ .../text_to_speech/transports/grpc.py | 29 +++ .../text_to_speech/transports/grpc_asyncio.py | 35 +++ .../text_to_speech/transports/rest.py | 26 +++ .../async_client.py | 6 +- .../client.py | 2 +- .../cloud/texttospeech_v1/types/__init__.py | 8 + .../cloud/texttospeech_v1/types/cloud_tts.py | 112 +++++++++- .../cloud/texttospeech_v1beta1/__init__.py | 8 + .../texttospeech_v1beta1/gapic_metadata.json | 15 ++ .../texttospeech_v1beta1/gapic_version.py | 2 +- .../services/text_to_speech/async_client.py | 102 ++++++++- .../services/text_to_speech/client.py | 96 ++++++++- .../text_to_speech/transports/base.py | 17 ++ .../text_to_speech/transports/grpc.py | 29 +++ .../text_to_speech/transports/grpc_asyncio.py | 35 +++ .../text_to_speech/transports/rest.py | 26 +++ .../async_client.py | 6 +- .../client.py | 2 +- .../texttospeech_v1beta1/types/__init__.py | 8 + .../texttospeech_v1beta1/types/cloud_tts.py | 112 +++++++++- ...metadata_google.cloud.texttospeech.v1.json | 155 ++++++++++++- ...ata_google.cloud.texttospeech.v1beta1.json | 155 ++++++++++++- ...xt_to_speech_streaming_synthesize_async.py | 66 ++++++ ...ext_to_speech_streaming_synthesize_sync.py | 66 ++++++ ...xt_to_speech_streaming_synthesize_async.py | 66 ++++++ ...ext_to_speech_streaming_synthesize_sync.py | 66 ++++++ .../scripts/fixup_texttospeech_v1_keywords.py | 1 + .../fixup_texttospeech_v1beta1_keywords.py | 1 + .../texttospeech_v1/test_text_to_speech.py | 203 +++++++++++++++++- ...st_text_to_speech_long_audio_synthesize.py | 19 +- .../test_text_to_speech.py | 203 +++++++++++++++++- ...st_text_to_speech_long_audio_synthesize.py | 19 +- 40 files changed, 1881 insertions(+), 65 deletions(-) create mode 100644 
packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_async.py create mode 100644 packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_sync.py create mode 100644 packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_async.py create mode 100644 packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_sync.py diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/__init__.py index 1e92c39ce373..7327d61c79d8 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/__init__.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/__init__.py @@ -37,6 +37,10 @@ ListVoicesRequest, ListVoicesResponse, SsmlVoiceGender, + StreamingSynthesisInput, + StreamingSynthesizeConfig, + StreamingSynthesizeRequest, + StreamingSynthesizeResponse, SynthesisInput, SynthesizeSpeechRequest, SynthesizeSpeechResponse, @@ -58,6 +62,10 @@ "CustomVoiceParams", "ListVoicesRequest", "ListVoicesResponse", + "StreamingSynthesisInput", + "StreamingSynthesizeConfig", + "StreamingSynthesizeRequest", + "StreamingSynthesizeResponse", "SynthesisInput", "SynthesizeSpeechRequest", "SynthesizeSpeechResponse", diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 8671082a1dc9..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.16.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/__init__.py index e8a97471ba01..cf700121df8b 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/__init__.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/__init__.py @@ -30,6 +30,10 @@ ListVoicesRequest, ListVoicesResponse, SsmlVoiceGender, + StreamingSynthesisInput, + StreamingSynthesizeConfig, + StreamingSynthesizeRequest, + StreamingSynthesizeResponse, SynthesisInput, SynthesizeSpeechRequest, SynthesizeSpeechResponse, @@ -51,6 +55,10 @@ "ListVoicesRequest", "ListVoicesResponse", "SsmlVoiceGender", + "StreamingSynthesisInput", + "StreamingSynthesizeConfig", + "StreamingSynthesizeRequest", + "StreamingSynthesizeResponse", "SynthesisInput", "SynthesizeLongAudioMetadata", "SynthesizeLongAudioRequest", diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_metadata.json b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_metadata.json index f8570d743151..9c20cbce6b46 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_metadata.json +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_metadata.json @@ -15,6 +15,11 @@ "list_voices" ] }, + "StreamingSynthesize": { + "methods": [ + "streaming_synthesize" + ] + }, "SynthesizeSpeech": { "methods": [ "synthesize_speech" @@ -30,6 +35,11 @@ "list_voices" ] }, + "StreamingSynthesize": { + "methods": [ + "streaming_synthesize" + ] + }, "SynthesizeSpeech": { "methods": [ "synthesize_speech" @@ -45,6 +55,11 @@ "list_voices" ] }, + "StreamingSynthesize": { + "methods": [ + "streaming_synthesize" + ] + }, "SynthesizeSpeech": { "methods": [ "synthesize_speech" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 8671082a1dc9..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py index 7d83fd4072fa..be0edc7e5980 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py @@ -14,9 +14,11 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, Callable, Dict, Mapping, @@ -184,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextToSpeechClient).get_transport_class, type(TextToSpeechClient) - ) + get_transport_class = TextToSpeechClient.get_transport_class def __init__( self, @@ -495,6 +495,100 @@ async def sample_synthesize_speech(): # Done; return the response. return response + def streaming_synthesize( + self, + requests: Optional[AsyncIterator[cloud_tts.StreamingSynthesizeRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[cloud_tts.StreamingSynthesizeResponse]]: + r"""Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1 + + async def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1.TextToSpeechAsyncClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_synthesize(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.texttospeech_v1.types.StreamingSynthesizeRequest`]): + The request object AsyncIterator. Request message for the ``StreamingSynthesize`` method. + Multiple ``StreamingSynthesizeRequest`` messages are + sent in one call. The first message must contain a + ``streaming_config`` that fully specifies the request + configuration and must not contain ``input``. All + subsequent messages must only have ``input`` set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.texttospeech_v1.types.StreamingSynthesizeResponse]: + StreamingSynthesizeResponse is the only message returned to the + client by StreamingSynthesize method. A series of + zero or more StreamingSynthesizeResponse messages are + streamed back to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.streaming_synthesize + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py index 27b8ea780313..7269169ce96c 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py @@ -19,6 +19,8 @@ from typing import ( Callable, Dict, + Iterable, + Iterator, Mapping, MutableMapping, MutableSequence, @@ -659,7 +661,7 @@ def __init__( transport_init: Union[ Type[TextToSpeechTransport], Callable[..., TextToSpeechTransport] ] = ( - type(self).get_transport_class(transport) + TextToSpeechClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TextToSpeechTransport], transport) ) @@ -908,6 +910,98 @@ def sample_synthesize_speech(): # Done; return the response. return response + def streaming_synthesize( + self, + requests: Optional[Iterator[cloud_tts.StreamingSynthesizeRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[cloud_tts.StreamingSynthesizeResponse]: + r"""Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1 + + def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1.TextToSpeechClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_synthesize(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.texttospeech_v1.types.StreamingSynthesizeRequest]): + The request object iterator. Request message for the ``StreamingSynthesize`` method. + Multiple ``StreamingSynthesizeRequest`` messages are + sent in one call. 
The first message must contain a + ``streaming_config`` that fully specifies the request + configuration and must not contain ``input``. All + subsequent messages must only have ``input`` set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.texttospeech_v1.types.StreamingSynthesizeResponse]: + StreamingSynthesizeResponse is the only message returned to the + client by StreamingSynthesize method. A series of + zero or more StreamingSynthesizeResponse messages are + streamed back to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_synthesize] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "TextToSpeechClient": return self diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py index c533fda135dd..35dce47646bb 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.streaming_synthesize: gapic_v1.method.wrap_method( + self.streaming_synthesize, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -171,6 +176,18 @@ def synthesize_speech( ]: raise NotImplementedError() + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], + Union[ + cloud_tts.StreamingSynthesizeResponse, + Awaitable[cloud_tts.StreamingSynthesizeResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py index 7016c642147a..13950d3132b2 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py @@ -290,6 +290,35 @@ def synthesize_speech( ) return self._stubs["synthesize_speech"] + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], cloud_tts.StreamingSynthesizeResponse + ]: + r"""Return a callable for the streaming synthesize method over gRPC. + + Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + Returns: + Callable[[~.StreamingSynthesizeRequest], + ~.StreamingSynthesizeResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_synthesize" not in self._stubs: + self._stubs["streaming_synthesize"] = self.grpc_channel.stream_stream( + "/google.cloud.texttospeech.v1.TextToSpeech/StreamingSynthesize", + request_serializer=cloud_tts.StreamingSynthesizeRequest.serialize, + response_deserializer=cloud_tts.StreamingSynthesizeResponse.deserialize, + ) + return self._stubs["streaming_synthesize"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py index 5e983306ecdc..f0b32891e660 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py @@ -297,6 +297,36 @@ def synthesize_speech( ) return self._stubs["synthesize_speech"] + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], + Awaitable[cloud_tts.StreamingSynthesizeResponse], + ]: + r"""Return a callable for the streaming synthesize method over gRPC. + + Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + Returns: + Callable[[~.StreamingSynthesizeRequest], + Awaitable[~.StreamingSynthesizeResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "streaming_synthesize" not in self._stubs: + self._stubs["streaming_synthesize"] = self.grpc_channel.stream_stream( + "/google.cloud.texttospeech.v1.TextToSpeech/StreamingSynthesize", + request_serializer=cloud_tts.StreamingSynthesizeRequest.serialize, + response_deserializer=cloud_tts.StreamingSynthesizeResponse.deserialize, + ) + return self._stubs["streaming_synthesize"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -310,6 +340,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.streaming_synthesize: gapic_v1.method_async.wrap_method( + self.streaming_synthesize, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py index ee1376fc6213..6ae3ab4fe79c 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py @@ -356,6 +356,22 @@ def __call__( resp = self._interceptor.post_list_voices(resp) return resp + class _StreamingSynthesize(TextToSpeechRestStub): + def __hash__(self): + return hash("StreamingSynthesize") + + def __call__( + self, + request: cloud_tts.StreamingSynthesizeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method StreamingSynthesize is not available over REST transport" + ) + class _SynthesizeSpeech(TextToSpeechRestStub): def __hash__(self): return hash("SynthesizeSpeech") @@ -459,6 +475,16 @@ def list_voices( # In C++ this would require a dynamic_cast return self._ListVoices(self._session, self._host, self._interceptor) # type: ignore + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], cloud_tts.StreamingSynthesizeResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamingSynthesize(self._session, self._host, self._interceptor) # type: ignore + @property def synthesize_speech( self, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py index 3da661c4ba35..1e8e3dc9eb10 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextToSpeechLongAudioSynthesizeClient).get_transport_class, - type(TextToSpeechLongAudioSynthesizeClient), - ) + get_transport_class = TextToSpeechLongAudioSynthesizeClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py index 7d23ea13cbab..c03ded16c83d 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py @@ -686,7 +686,7 @@ def __init__( Type[TextToSpeechLongAudioSynthesizeTransport], Callable[..., TextToSpeechLongAudioSynthesizeTransport], ] = ( - type(self).get_transport_class(transport) + TextToSpeechLongAudioSynthesizeClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., TextToSpeechLongAudioSynthesizeTransport], transport diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/__init__.py index f72385b6e40b..af5276839661 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/__init__.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/__init__.py @@ -20,6 +20,10 @@ ListVoicesRequest, ListVoicesResponse, SsmlVoiceGender, + StreamingSynthesisInput, + StreamingSynthesizeConfig, + StreamingSynthesizeRequest, + StreamingSynthesizeResponse, SynthesisInput, SynthesizeSpeechRequest, SynthesizeSpeechResponse, @@ -37,6 +41,10 @@ "CustomVoiceParams", "ListVoicesRequest", "ListVoicesResponse", + "StreamingSynthesisInput", + "StreamingSynthesizeConfig", + "StreamingSynthesizeRequest", + "StreamingSynthesizeResponse", "SynthesisInput", "SynthesizeSpeechRequest", "SynthesizeSpeechResponse", diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py index b234001ee70f..3eefa60c9e5a 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py @@ -33,6 +33,10 @@ "AudioConfig", "CustomVoiceParams", "SynthesizeSpeechResponse", + "StreamingSynthesizeConfig", + "StreamingSynthesisInput", + "StreamingSynthesizeRequest", + "StreamingSynthesizeResponse", }, ) @@ -272,9 +276,9 @@ class VoiceSelectionParams(proto.Message): e.g. using "nb" (Norwegian Bokmal) instead of "no" (Norwegian)". name (str): - The name of the voice. If not set, the service will choose a - voice based on the other parameters such as language_code - and gender. + The name of the voice. If both the name and the gender are + not set, the service will choose a voice based on the other + parameters such as language_code. ssml_gender (google.cloud.texttospeech_v1.types.SsmlVoiceGender): The preferred gender of the voice. 
If not set, the service will choose a voice based on the other parameters such as @@ -448,4 +452,106 @@ class SynthesizeSpeechResponse(proto.Message): ) +class StreamingSynthesizeConfig(proto.Message): + r"""Provides configuration information for the + StreamingSynthesize request. + + Attributes: + voice (google.cloud.texttospeech_v1.types.VoiceSelectionParams): + Required. The desired voice of the + synthesized audio. + """ + + voice: "VoiceSelectionParams" = proto.Field( + proto.MESSAGE, + number=1, + message="VoiceSelectionParams", + ) + + +class StreamingSynthesisInput(proto.Message): + r"""Input to be synthesized. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + The raw text to be synthesized. It is + recommended that each input contains complete, + terminating sentences, as this will likely + result in better prosody in the output audio. + That being said, users are free to input text + however they please. + + This field is a member of `oneof`_ ``input_source``. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + oneof="input_source", + ) + + +class StreamingSynthesizeRequest(proto.Message): + r"""Request message for the ``StreamingSynthesize`` method. Multiple + ``StreamingSynthesizeRequest`` messages are sent in one call. The + first message must contain a ``streaming_config`` that fully + specifies the request configuration and must not contain ``input``. + All subsequent messages must only have ``input`` set. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + streaming_config (google.cloud.texttospeech_v1.types.StreamingSynthesizeConfig): + StreamingSynthesizeConfig to be used in this streaming + attempt. Only specified in the first message sent in a + ``StreamingSynthesize`` call. + + This field is a member of `oneof`_ ``streaming_request``. + input (google.cloud.texttospeech_v1.types.StreamingSynthesisInput): + Input to synthesize. Specified in all messages but the first + in a ``StreamingSynthesize`` call. + + This field is a member of `oneof`_ ``streaming_request``. + """ + + streaming_config: "StreamingSynthesizeConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="streaming_request", + message="StreamingSynthesizeConfig", + ) + input: "StreamingSynthesisInput" = proto.Field( + proto.MESSAGE, + number=2, + oneof="streaming_request", + message="StreamingSynthesisInput", + ) + + +class StreamingSynthesizeResponse(proto.Message): + r"""``StreamingSynthesizeResponse`` is the only message returned to the + client by ``StreamingSynthesize`` method. A series of zero or more + ``StreamingSynthesizeResponse`` messages are streamed back to the + client. + + Attributes: + audio_content (bytes): + The audio data bytes encoded as specified in + the request. This is headerless LINEAR16 audio + with a sample rate of 24000. 
+ """ + + audio_content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/__init__.py index 0d247d8d245b..8f56e19e75c2 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/__init__.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/__init__.py @@ -30,6 +30,10 @@ ListVoicesRequest, ListVoicesResponse, SsmlVoiceGender, + StreamingSynthesisInput, + StreamingSynthesizeConfig, + StreamingSynthesizeRequest, + StreamingSynthesizeResponse, SynthesisInput, SynthesizeSpeechRequest, SynthesizeSpeechResponse, @@ -52,6 +56,10 @@ "ListVoicesRequest", "ListVoicesResponse", "SsmlVoiceGender", + "StreamingSynthesisInput", + "StreamingSynthesizeConfig", + "StreamingSynthesizeRequest", + "StreamingSynthesizeResponse", "SynthesisInput", "SynthesizeLongAudioMetadata", "SynthesizeLongAudioRequest", diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_metadata.json b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_metadata.json index 668c7494623a..174c6d86b5f4 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_metadata.json +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_metadata.json @@ -15,6 +15,11 @@ "list_voices" ] }, + "StreamingSynthesize": { + "methods": [ + "streaming_synthesize" + ] + }, "SynthesizeSpeech": { "methods": [ "synthesize_speech" @@ -30,6 +35,11 @@ "list_voices" ] }, + "StreamingSynthesize": { + "methods": [ + "streaming_synthesize" + ] + }, "SynthesizeSpeech": { "methods": [ "synthesize_speech" @@ -45,6 +55,11 @@ "list_voices" ] }, + "StreamingSynthesize": { + "methods": [ + "streaming_synthesize" + ] + }, "SynthesizeSpeech": { "methods": [ "synthesize_speech" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 8671082a1dc9..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py index f9699448f08d..2029117a9a01 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py @@ -14,9 +14,11 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, Callable, Dict, Mapping, @@ -184,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextToSpeechClient).get_transport_class, type(TextToSpeechClient) - ) + get_transport_class = TextToSpeechClient.get_transport_class def __init__( self, @@ -495,6 +495,100 @@ async def sample_synthesize_speech(): # Done; return the response. return response + def streaming_synthesize( + self, + requests: Optional[AsyncIterator[cloud_tts.StreamingSynthesizeRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[cloud_tts.StreamingSynthesizeResponse]]: + r"""Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1beta1 + + async def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1beta1.TextToSpeechAsyncClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1beta1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1beta1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1beta1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_synthesize(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeRequest`]): + The request object AsyncIterator. Request message for the ``StreamingSynthesize`` method. + Multiple ``StreamingSynthesizeRequest`` messages are + sent in one call. The first message must contain a + ``streaming_config`` that fully specifies the request + configuration and must not contain ``input``. All + subsequent messages must only have ``input`` set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeResponse]: + StreamingSynthesizeResponse is the only message returned to the + client by StreamingSynthesize method. A series of + zero or more StreamingSynthesizeResponse messages are + streamed back to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.streaming_synthesize + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py index 8ed1c1118503..7c2c925a082e 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py @@ -19,6 +19,8 @@ from typing import ( Callable, Dict, + Iterable, + Iterator, Mapping, MutableMapping, MutableSequence, @@ -659,7 +661,7 @@ def __init__( transport_init: Union[ Type[TextToSpeechTransport], Callable[..., TextToSpeechTransport] ] = ( - type(self).get_transport_class(transport) + TextToSpeechClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TextToSpeechTransport], transport) ) @@ -908,6 +910,98 @@ def sample_synthesize_speech(): # Done; return the response. return response + def streaming_synthesize( + self, + requests: Optional[Iterator[cloud_tts.StreamingSynthesizeRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[cloud_tts.StreamingSynthesizeResponse]: + r"""Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1beta1 + + def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1beta1.TextToSpeechClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1beta1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1beta1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1beta1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_synthesize(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeRequest]): + The request object iterator. Request message for the ``StreamingSynthesize`` method. + Multiple ``StreamingSynthesizeRequest`` messages are + sent in one call. 
The first message must contain a + ``streaming_config`` that fully specifies the request + configuration and must not contain ``input``. All + subsequent messages must only have ``input`` set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeResponse]: + StreamingSynthesizeResponse is the only message returned to the + client by StreamingSynthesize method. A series of + zero or more StreamingSynthesizeResponse messages are + streamed back to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_synthesize] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "TextToSpeechClient": return self diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py index 8134c6c35bc3..912b3b1528a1 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.streaming_synthesize: gapic_v1.method.wrap_method( + self.streaming_synthesize, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -171,6 +176,18 @@ def synthesize_speech( ]: raise NotImplementedError() + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], + Union[ + cloud_tts.StreamingSynthesizeResponse, + Awaitable[cloud_tts.StreamingSynthesizeResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py index b96bebfd2bee..68c20e80e3ca 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py @@ -290,6 +290,35 @@ def synthesize_speech( ) return self._stubs["synthesize_speech"] + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], cloud_tts.StreamingSynthesizeResponse + ]: + r"""Return a callable for the streaming synthesize method over gRPC. + + Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + Returns: + Callable[[~.StreamingSynthesizeRequest], + ~.StreamingSynthesizeResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_synthesize" not in self._stubs: + self._stubs["streaming_synthesize"] = self.grpc_channel.stream_stream( + "/google.cloud.texttospeech.v1beta1.TextToSpeech/StreamingSynthesize", + request_serializer=cloud_tts.StreamingSynthesizeRequest.serialize, + response_deserializer=cloud_tts.StreamingSynthesizeResponse.deserialize, + ) + return self._stubs["streaming_synthesize"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py index 1b32573f27ac..6dbcc65f4eb8 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py @@ -297,6 +297,36 @@ def synthesize_speech( ) return self._stubs["synthesize_speech"] + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], + Awaitable[cloud_tts.StreamingSynthesizeResponse], + ]: + r"""Return a callable for the streaming synthesize method over gRPC. + + Performs bidirectional streaming speech synthesis: + receive audio while sending text. + + Returns: + Callable[[~.StreamingSynthesizeRequest], + Awaitable[~.StreamingSynthesizeResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "streaming_synthesize" not in self._stubs: + self._stubs["streaming_synthesize"] = self.grpc_channel.stream_stream( + "/google.cloud.texttospeech.v1beta1.TextToSpeech/StreamingSynthesize", + request_serializer=cloud_tts.StreamingSynthesizeRequest.serialize, + response_deserializer=cloud_tts.StreamingSynthesizeResponse.deserialize, + ) + return self._stubs["streaming_synthesize"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -310,6 +340,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.streaming_synthesize: gapic_v1.method_async.wrap_method( + self.streaming_synthesize, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py index 9eab301a71e9..fe059d7a03ab 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py @@ -356,6 +356,22 @@ def __call__( resp = self._interceptor.post_list_voices(resp) return resp + class _StreamingSynthesize(TextToSpeechRestStub): + def __hash__(self): + return hash("StreamingSynthesize") + + def __call__( + self, + request: cloud_tts.StreamingSynthesizeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method StreamingSynthesize is not available over REST transport" + ) + class _SynthesizeSpeech(TextToSpeechRestStub): def __hash__(self): return hash("SynthesizeSpeech") @@ -459,6 +475,16 @@ def list_voices( # In C++ this would require a dynamic_cast return self._ListVoices(self._session, self._host, self._interceptor) # type: ignore + @property + def streaming_synthesize( + self, + ) -> Callable[ + [cloud_tts.StreamingSynthesizeRequest], cloud_tts.StreamingSynthesizeResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamingSynthesize(self._session, self._host, self._interceptor) # type: ignore + @property def synthesize_speech( self, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py index 3a1a693d383f..b973c9ce717f 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextToSpeechLongAudioSynthesizeClient).get_transport_class, - type(TextToSpeechLongAudioSynthesizeClient), - ) + get_transport_class = TextToSpeechLongAudioSynthesizeClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py index 663f23707a1c..f5b2a8c92966 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py @@ -686,7 +686,7 @@ def __init__( Type[TextToSpeechLongAudioSynthesizeTransport], Callable[..., TextToSpeechLongAudioSynthesizeTransport], ] = ( - type(self).get_transport_class(transport) + TextToSpeechLongAudioSynthesizeClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., TextToSpeechLongAudioSynthesizeTransport], transport diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/__init__.py index 42c6ce99ff40..6ce3f53efc7c 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/__init__.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/__init__.py @@ -20,6 +20,10 @@ ListVoicesRequest, ListVoicesResponse, SsmlVoiceGender, + StreamingSynthesisInput, + StreamingSynthesizeConfig, + StreamingSynthesizeRequest, + StreamingSynthesizeResponse, SynthesisInput, SynthesizeSpeechRequest, SynthesizeSpeechResponse, @@ -38,6 +42,10 @@ "CustomVoiceParams", "ListVoicesRequest", "ListVoicesResponse", + "StreamingSynthesisInput", + "StreamingSynthesizeConfig", + "StreamingSynthesizeRequest", + "StreamingSynthesizeResponse", "SynthesisInput", "SynthesizeSpeechRequest", "SynthesizeSpeechResponse", diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py index d1cb430714a8..216ece51ff22 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py @@ -34,6 +34,10 @@ "CustomVoiceParams", "SynthesizeSpeechResponse", "Timepoint", + "StreamingSynthesizeConfig", + "StreamingSynthesisInput", + "StreamingSynthesizeRequest", + "StreamingSynthesizeResponse", }, ) @@ -299,9 +303,9 @@ class VoiceSelectionParams(proto.Message): e.g. using "nb" (Norwegian Bokmal) instead of "no" (Norwegian)". name (str): - The name of the voice. If not set, the service will choose a - voice based on the other parameters such as language_code - and gender. + The name of the voice. If both the name and the gender are + not set, the service will choose a voice based on the other + parameters such as language_code. ssml_gender (google.cloud.texttospeech_v1beta1.types.SsmlVoiceGender): The preferred gender of the voice. 
If not set, the service will choose a voice based on the other parameters such as @@ -514,4 +518,106 @@ class Timepoint(proto.Message): ) +class StreamingSynthesizeConfig(proto.Message): + r"""Provides configuration information for the + StreamingSynthesize request. + + Attributes: + voice (google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams): + Required. The desired voice of the + synthesized audio. + """ + + voice: "VoiceSelectionParams" = proto.Field( + proto.MESSAGE, + number=1, + message="VoiceSelectionParams", + ) + + +class StreamingSynthesisInput(proto.Message): + r"""Input to be synthesized. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + The raw text to be synthesized. It is + recommended that each input contains complete, + terminating sentences, as this will likely + result in better prosody in the output audio. + That being said, users are free to input text + however they please. + + This field is a member of `oneof`_ ``input_source``. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + oneof="input_source", + ) + + +class StreamingSynthesizeRequest(proto.Message): + r"""Request message for the ``StreamingSynthesize`` method. Multiple + ``StreamingSynthesizeRequest`` messages are sent in one call. The + first message must contain a ``streaming_config`` that fully + specifies the request configuration and must not contain ``input``. + All subsequent messages must only have ``input`` set. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + streaming_config (google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeConfig): + StreamingSynthesizeConfig to be used in this streaming + attempt. Only specified in the first message sent in a + ``StreamingSynthesize`` call. + + This field is a member of `oneof`_ ``streaming_request``. + input (google.cloud.texttospeech_v1beta1.types.StreamingSynthesisInput): + Input to synthesize. Specified in all messages but the first + in a ``StreamingSynthesize`` call. + + This field is a member of `oneof`_ ``streaming_request``. + """ + + streaming_config: "StreamingSynthesizeConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="streaming_request", + message="StreamingSynthesizeConfig", + ) + input: "StreamingSynthesisInput" = proto.Field( + proto.MESSAGE, + number=2, + oneof="streaming_request", + message="StreamingSynthesisInput", + ) + + +class StreamingSynthesizeResponse(proto.Message): + r"""``StreamingSynthesizeResponse`` is the only message returned to the + client by ``StreamingSynthesize`` method. A series of zero or more + ``StreamingSynthesizeResponse`` messages are streamed back to the + client. + + Attributes: + audio_content (bytes): + The audio data bytes encoded as specified in + the request. This is headerless LINEAR16 audio + with a sample rate of 24000. 
+ """ + + audio_content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index 8b708f5a395d..f87785fcdd45 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.16.5" + "version": "0.1.0" }, "snippets": [ { @@ -325,6 +325,159 @@ ], "title": "texttospeech_v1_generated_text_to_speech_list_voices_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.texttospeech_v1.TextToSpeechAsyncClient", + "shortName": "TextToSpeechAsyncClient" + }, + "fullName": "google.cloud.texttospeech_v1.TextToSpeechAsyncClient.streaming_synthesize", + "method": { + "fullName": "google.cloud.texttospeech.v1.TextToSpeech.StreamingSynthesize", + "service": { + "fullName": "google.cloud.texttospeech.v1.TextToSpeech", + "shortName": "TextToSpeech" + }, + "shortName": "StreamingSynthesize" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.texttospeech_v1.types.StreamingSynthesizeRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.texttospeech_v1.types.StreamingSynthesizeResponse]", + "shortName": "streaming_synthesize" + }, + "description": "Sample for StreamingSynthesize", + "file": "texttospeech_v1_generated_text_to_speech_streaming_synthesize_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "texttospeech_v1_generated_TextToSpeech_StreamingSynthesize_async", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "texttospeech_v1_generated_text_to_speech_streaming_synthesize_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.texttospeech_v1.TextToSpeechClient", + "shortName": "TextToSpeechClient" + }, + "fullName": "google.cloud.texttospeech_v1.TextToSpeechClient.streaming_synthesize", + "method": { + "fullName": "google.cloud.texttospeech.v1.TextToSpeech.StreamingSynthesize", + "service": { + "fullName": "google.cloud.texttospeech.v1.TextToSpeech", + "shortName": "TextToSpeech" + }, + "shortName": "StreamingSynthesize" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.texttospeech_v1.types.StreamingSynthesizeRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.texttospeech_v1.types.StreamingSynthesizeResponse]", + 
"shortName": "streaming_synthesize" + }, + "description": "Sample for StreamingSynthesize", + "file": "texttospeech_v1_generated_text_to_speech_streaming_synthesize_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "texttospeech_v1_generated_TextToSpeech_StreamingSynthesize_sync", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "texttospeech_v1_generated_text_to_speech_streaming_synthesize_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index 9691e03f39da..2877853b66c5 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.16.5" + "version": "0.1.0" }, "snippets": [ { @@ -325,6 +325,159 @@ ], "title": "texttospeech_v1beta1_generated_text_to_speech_list_voices_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.texttospeech_v1beta1.TextToSpeechAsyncClient", + "shortName": "TextToSpeechAsyncClient" + }, + "fullName": "google.cloud.texttospeech_v1beta1.TextToSpeechAsyncClient.streaming_synthesize", + "method": { + "fullName": "google.cloud.texttospeech.v1beta1.TextToSpeech.StreamingSynthesize", + "service": { + "fullName": "google.cloud.texttospeech.v1beta1.TextToSpeech", + "shortName": "TextToSpeech" + }, + "shortName": "StreamingSynthesize" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeResponse]", + "shortName": "streaming_synthesize" + }, + "description": "Sample for StreamingSynthesize", + "file": "texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "texttospeech_v1beta1_generated_TextToSpeech_StreamingSynthesize_async", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.texttospeech_v1beta1.TextToSpeechClient", + 
"shortName": "TextToSpeechClient" + }, + "fullName": "google.cloud.texttospeech_v1beta1.TextToSpeechClient.streaming_synthesize", + "method": { + "fullName": "google.cloud.texttospeech.v1beta1.TextToSpeech.StreamingSynthesize", + "service": { + "fullName": "google.cloud.texttospeech.v1beta1.TextToSpeech", + "shortName": "TextToSpeech" + }, + "shortName": "StreamingSynthesize" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.texttospeech_v1beta1.types.StreamingSynthesizeResponse]", + "shortName": "streaming_synthesize" + }, + "description": "Sample for StreamingSynthesize", + "file": "texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "texttospeech_v1beta1_generated_TextToSpeech_StreamingSynthesize_sync", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_async.py b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_async.py new file mode 100644 index 000000000000..ef666ea6b2a3 --- /dev/null +++ b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingSynthesize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-texttospeech + + +# [START texttospeech_v1_generated_TextToSpeech_StreamingSynthesize_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import texttospeech_v1 + + +async def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1.TextToSpeechAsyncClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_synthesize(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END texttospeech_v1_generated_TextToSpeech_StreamingSynthesize_async] diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_sync.py b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_sync.py new file mode 100644 index 000000000000..a0f24ef1a98b --- /dev/null +++ b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1_generated_text_to_speech_streaming_synthesize_sync.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingSynthesize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-texttospeech + + +# [START texttospeech_v1_generated_TextToSpeech_StreamingSynthesize_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
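One thing the generated async sample above leaves implicit is that it only defines a coroutine; a caller still has to run it on an event loop. A trimmed-down driver might look like this, assuming application default credentials and an illustrative "en-US" voice selection.

import asyncio

from google.cloud import texttospeech_v1


async def main():
    client = texttospeech_v1.TextToSpeechAsyncClient()
    streaming_config = texttospeech_v1.StreamingSynthesizeConfig(
        voice=texttospeech_v1.VoiceSelectionParams(language_code="en-US")
    )

    def requests():
        # Configuration first, then one or more input-only messages.
        yield texttospeech_v1.StreamingSynthesizeRequest(streaming_config=streaming_config)
        yield texttospeech_v1.StreamingSynthesizeRequest(
            input=texttospeech_v1.StreamingSynthesisInput(text="Hello from streaming TTS")
        )

    stream = await client.streaming_synthesize(requests=requests())
    async for response in stream:
        print(f"received {len(response.audio_content)} audio bytes")


if __name__ == "__main__":
    asyncio.run(main())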
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import texttospeech_v1 + + +def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1.TextToSpeechClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_synthesize(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END texttospeech_v1_generated_TextToSpeech_StreamingSynthesize_sync] diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_async.py b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_async.py new file mode 100644 index 000000000000..9b403cb1b4a1 --- /dev/null +++ b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingSynthesize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-texttospeech + + +# [START texttospeech_v1beta1_generated_TextToSpeech_StreamingSynthesize_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
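Because the responses carry headerless LINEAR16, the print(response) lines in the samples above are placeholders; a small standard-library helper can wrap the collected chunks in a WAV container. Mono, 16-bit samples at 24000 Hz are assumed.

import wave


def pcm_chunks_to_wav(chunks, path="synthesized.wav", sample_rate_hz=24000):
    # chunks: an iterable of StreamingSynthesizeResponse.audio_content byte strings.
    with wave.open(path, "wb") as wav_file:
        wav_file.setnchannels(1)  # assumed mono
        wav_file.setsampwidth(2)  # LINEAR16 -> 2 bytes per sample
        wav_file.setframerate(sample_rate_hz)
        for chunk in chunks:
            wav_file.writeframes(chunk)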
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import texttospeech_v1beta1 + + +async def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1beta1.TextToSpeechAsyncClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1beta1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1beta1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1beta1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_synthesize(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END texttospeech_v1beta1_generated_TextToSpeech_StreamingSynthesize_async] diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_sync.py b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_sync.py new file mode 100644 index 000000000000..cda6d83baa38 --- /dev/null +++ b/packages/google-cloud-texttospeech/samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_streaming_synthesize_sync.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingSynthesize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-texttospeech + + +# [START texttospeech_v1beta1_generated_TextToSpeech_StreamingSynthesize_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import texttospeech_v1beta1 + + +def sample_streaming_synthesize(): + # Create a client + client = texttospeech_v1beta1.TextToSpeechClient() + + # Initialize request argument(s) + streaming_config = texttospeech_v1beta1.StreamingSynthesizeConfig() + streaming_config.voice.language_code = "language_code_value" + + request = texttospeech_v1beta1.StreamingSynthesizeRequest( + streaming_config=streaming_config, + ) + + # This method expects an iterator which contains + # 'texttospeech_v1beta1.StreamingSynthesizeRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_synthesize(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END texttospeech_v1beta1_generated_TextToSpeech_StreamingSynthesize_sync] diff --git a/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1_keywords.py b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1_keywords.py index f8b062b9c12c..4b037274e385 100644 --- a/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1_keywords.py +++ b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1_keywords.py @@ -40,6 +40,7 @@ class texttospeechCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'list_voices': ('language_code', ), + 'streaming_synthesize': ('streaming_config', 'input', ), 'synthesize_long_audio': ('input', 'audio_config', 'output_gcs_uri', 'voice', 'parent', ), 'synthesize_speech': ('input', 'voice', 'audio_config', ), } diff --git a/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1beta1_keywords.py b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1beta1_keywords.py index 47dd32e8a36a..eb94e6a67dde 100644 --- a/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1beta1_keywords.py +++ b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1beta1_keywords.py @@ -40,6 +40,7 @@ class texttospeechCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'list_voices': ('language_code', ), + 'streaming_synthesize': ('streaming_config', 'input', ), 'synthesize_long_audio': ('input', 'audio_config', 'output_gcs_uri', 'voice', 'parent', ), 'synthesize_speech': ('input', 'voice', 'audio_config', 'enable_time_pointing', ), } diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py index fc6754fa817f..9a3422979f06 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py @@ -1253,22 +1253,23 @@ async def test_list_voices_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_voices - ] = mock_object 
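The fixup-script entries above register each RPC's request fields in proto order so the upgrade tooling can rewrite legacy positional call sites; for streaming methods such as streaming_synthesize, which take a request iterator, the entry mainly documents field order. The rewrite shape sketched below is stated from memory and should be treated as an assumption, not a spec.

METHOD_TO_PARAMS = {
    "synthesize_speech": ("input", "voice", "audio_config"),
}


def positional_to_request(method, args):
    # Approximates the call-site rewrite: positional arguments are zipped with
    # the registered field order and folded into a single `request` dict.
    return {"request": dict(zip(METHOD_TO_PARAMS[method], args))}


print(positional_to_request("synthesize_speech", ("<input>", "<voice>", "<audio_config>")))
# -> {'request': {'input': '<input>', 'voice': '<voice>', 'audio_config': '<audio_config>'}}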
+ ] = mock_rpc request = {} await client.list_voices(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_voices(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1559,22 +1560,23 @@ async def test_synthesize_speech_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.synthesize_speech - ] = mock_object + ] = mock_rpc request = {} await client.synthesize_speech(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.synthesize_speech(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1736,6 +1738,163 @@ async def test_synthesize_speech_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.StreamingSynthesizeRequest, + dict, + ], +) +def test_streaming_synthesize(request_type, transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_synthesize), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([cloud_tts.StreamingSynthesizeResponse()]) + response = client.streaming_synthesize(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, cloud_tts.StreamingSynthesizeResponse) + + +def test_streaming_synthesize_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.streaming_synthesize in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.streaming_synthesize + ] = mock_rpc + request = [{}] + client.streaming_synthesize(request) + + # Establish that the underlying gRPC stub method was called. 
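Application code built on streaming_synthesize can be unit-tested with the same patching pattern the generated test above uses: patch the transport-level callable and hand back a canned response iterator. A self-contained sketch (no network traffic; AnonymousCredentials only satisfy the constructor, and collect_audio is a hypothetical helper under test):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import texttospeech_v1


def collect_audio(client, requests):
    # Hypothetical application helper: concatenate all streamed audio chunks.
    return b"".join(r.audio_content for r in client.streaming_synthesize(requests))


def test_collect_audio():
    client = texttospeech_v1.TextToSpeechClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
    )
    canned = [texttospeech_v1.StreamingSynthesizeResponse(audio_content=b"\x00\x01")]
    with mock.patch.object(
        type(client.transport.streaming_synthesize), "__call__"
    ) as call:
        call.return_value = iter(canned)
        audio = collect_audio(
            client, iter([texttospeech_v1.StreamingSynthesizeRequest()])
        )
    assert audio == b"\x00\x01"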
+ assert mock_rpc.call_count == 1 + + client.streaming_synthesize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_streaming_synthesize_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.streaming_synthesize + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.streaming_synthesize + ] = mock_rpc + + request = [{}] + await client.streaming_synthesize(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.streaming_synthesize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_streaming_synthesize_async( + transport: str = "grpc_asyncio", request_type=cloud_tts.StreamingSynthesizeRequest +): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_synthesize), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[cloud_tts.StreamingSynthesizeResponse()] + ) + response = await client.streaming_synthesize(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ message = await response.read() + assert isinstance(message, cloud_tts.StreamingSynthesizeResponse) + + +@pytest.mark.asyncio +async def test_streaming_synthesize_async_from_dict(): + await test_streaming_synthesize_async(request_type=dict) + + @pytest.mark.parametrize( "request_type", [ @@ -2263,6 +2422,30 @@ def test_synthesize_speech_rest_error(): ) +def test_streaming_synthesize_rest_no_http_options(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = cloud_tts.StreamingSynthesizeRequest() + requests = [request] + with pytest.raises(RuntimeError): + client.streaming_synthesize(requests) + + +def test_streaming_synthesize_rest_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.streaming_synthesize({}) + assert "Method StreamingSynthesize is not available over REST transport" in str( + not_implemented_error.value + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.TextToSpeechGrpcTransport( @@ -2404,6 +2587,7 @@ def test_text_to_speech_base_transport(): methods = ( "list_voices", "synthesize_speech", + "streaming_synthesize", "get_operation", "list_operations", ) @@ -2667,6 +2851,9 @@ def test_text_to_speech_client_transport_session_collision(transport_name): session1 = client1.transport.synthesize_speech._session session2 = client2.transport.synthesize_speech._session assert session1 != session2 + session1 = client1.transport.streaming_synthesize._session + session2 = client2.transport.streaming_synthesize._session + assert session1 != session2 def test_text_to_speech_grpc_transport_channel(): diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py index 0d90338932b2..e81c15d0ade6 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py @@ -1351,8 +1351,9 @@ def test_synthesize_long_audio_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.synthesize_long_audio(request) @@ -1408,26 +1409,28 @@ async def test_synthesize_long_audio_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.synthesize_long_audio - ] = mock_object + ] = mock_rpc request = {} await client.synthesize_long_audio(request) # Establish that the underlying gRPC stub method was called. 
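The two REST tests above capture a practical constraint: StreamingSynthesize has no google.api.http binding, so it is gRPC-only. A short sketch of the failure mode a REST-transport caller would see (AnonymousCredentials keep it offline, mirroring the test):

from google.auth import credentials as ga_credentials
from google.cloud import texttospeech_v1


def demo_rest_limitation():
    client = texttospeech_v1.TextToSpeechClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    try:
        client.streaming_synthesize({})
    except NotImplementedError as exc:
        # Expected: "Method StreamingSynthesize is not available over REST transport"
        print(exc)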
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.synthesize_long_audio(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py index 87bbc9a96d70..8da3d41f86e1 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py @@ -1253,22 +1253,23 @@ async def test_list_voices_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_voices - ] = mock_object + ] = mock_rpc request = {} await client.list_voices(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_voices(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1559,22 +1560,23 @@ async def test_synthesize_speech_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.synthesize_speech - ] = mock_object + ] = mock_rpc request = {} await client.synthesize_speech(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.synthesize_speech(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1736,6 +1738,163 @@ async def test_synthesize_speech_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.StreamingSynthesizeRequest, + dict, + ], +) +def test_streaming_synthesize(request_type, transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_synthesize), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iter([cloud_tts.StreamingSynthesizeResponse()]) + response = client.streaming_synthesize(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, cloud_tts.StreamingSynthesizeResponse) + + +def test_streaming_synthesize_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.streaming_synthesize in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.streaming_synthesize + ] = mock_rpc + request = [{}] + client.streaming_synthesize(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.streaming_synthesize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_streaming_synthesize_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.streaming_synthesize + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.streaming_synthesize + ] = mock_rpc + + request = [{}] + await client.streaming_synthesize(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.streaming_synthesize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_streaming_synthesize_async( + transport: str = "grpc_asyncio", request_type=cloud_tts.StreamingSynthesizeRequest +): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.streaming_synthesize), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[cloud_tts.StreamingSynthesizeResponse()] + ) + response = await client.streaming_synthesize(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, cloud_tts.StreamingSynthesizeResponse) + + +@pytest.mark.asyncio +async def test_streaming_synthesize_async_from_dict(): + await test_streaming_synthesize_async(request_type=dict) + + @pytest.mark.parametrize( "request_type", [ @@ -2265,6 +2424,30 @@ def test_synthesize_speech_rest_error(): ) +def test_streaming_synthesize_rest_no_http_options(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = cloud_tts.StreamingSynthesizeRequest() + requests = [request] + with pytest.raises(RuntimeError): + client.streaming_synthesize(requests) + + +def test_streaming_synthesize_rest_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.streaming_synthesize({}) + assert "Method StreamingSynthesize is not available over REST transport" in str( + not_implemented_error.value + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.TextToSpeechGrpcTransport( @@ -2406,6 +2589,7 @@ def test_text_to_speech_base_transport(): methods = ( "list_voices", "synthesize_speech", + "streaming_synthesize", "get_operation", "list_operations", ) @@ -2669,6 +2853,9 @@ def test_text_to_speech_client_transport_session_collision(transport_name): session1 = client1.transport.synthesize_speech._session session2 = client2.transport.synthesize_speech._session assert session1 != session2 + session1 = client1.transport.streaming_synthesize._session + session2 = client2.transport.streaming_synthesize._session + assert session1 != session2 def test_text_to_speech_grpc_transport_channel(): diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py index 0532567b8b6c..1cf81d81470f 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py @@ -1351,8 +1351,9 @@ def test_synthesize_long_audio_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.synthesize_long_audio(request) @@ -1408,26 +1409,28 @@ async def test_synthesize_long_audio_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.synthesize_long_audio - ] = mock_object + ] = mock_rpc request = {} await client.synthesize_long_audio(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.synthesize_long_audio(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 5e6b4ce92614cc9a169c530f9a23d3934f4868cc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:08:21 -0400 Subject: [PATCH 010/108] docs: [google-cloud-managedkafka] changed API title to official name (#13010) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 662676310 Source-Link: https://github.com/googleapis/googleapis/commit/e813a85a1455e44c4c93314d8c2fdf3d77516916 Source-Link: https://github.com/googleapis/googleapis-gen/commit/5b69026c55c42bc69280062dc33e579a666ab81e Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1hbmFnZWRrYWZrYS8uT3dsQm90LnlhbWwiLCJoIjoiNWI2OTAyNmM1NWM0MmJjNjkyODAwNjJkYzMzZTU3OWE2NjZhYjgxZSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.repo-metadata.json | 4 +- packages/google-cloud-managedkafka/README.rst | 18 +- .../docs/summary_overview.md | 6 +- .../cloud/managedkafka/gapic_version.py | 2 +- .../cloud/managedkafka_v1/gapic_version.py | 2 +- .../services/managed_kafka/async_client.py | 5 +- .../services/managed_kafka/client.py | 2 +- .../cloud/managedkafka_v1/types/resources.py | 4 +- ...metadata_google.cloud.managedkafka.v1.json | 2 +- .../managedkafka_v1/test_managed_kafka.py | 156 ++++++++++-------- 10 files changed, 109 insertions(+), 92 deletions(-) diff --git a/packages/google-cloud-managedkafka/.repo-metadata.json b/packages/google-cloud-managedkafka/.repo-metadata.json index bae310658c84..3f52296e1254 100644 --- a/packages/google-cloud-managedkafka/.repo-metadata.json +++ b/packages/google-cloud-managedkafka/.repo-metadata.json @@ -1,7 +1,7 @@ { "name": "google-cloud-managedkafka", - "name_pretty": "Apache Kafka for BigQuery API", - "api_description": "Apache Kafka for BigQuery is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud.", + "name_pretty": "Managed Service for Apache Kafka", + "api_description": "Managed Service for Apache Kafka API is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud.", "product_documentation": "https://cloud.google.com/managed-kafka", "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest", "issue_tracker": "https://issuetracker.google.com/issues/new?component=1376234", 
diff --git a/packages/google-cloud-managedkafka/README.rst b/packages/google-cloud-managedkafka/README.rst index 475bfc1ae7eb..b0d1efd0113f 100644 --- a/packages/google-cloud-managedkafka/README.rst +++ b/packages/google-cloud-managedkafka/README.rst @@ -1,9 +1,9 @@ -Python Client for Apache Kafka for BigQuery API -=============================================== +Python Client for Managed Service for Apache Kafka +================================================== |preview| |pypi| |versions| -`Apache Kafka for BigQuery API`_: Apache Kafka for BigQuery is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud. +`Managed Service for Apache Kafka`_: Managed Service for Apache Kafka API is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud. - `Client Library Documentation`_ - `Product Documentation`_ @@ -14,7 +14,7 @@ Python Client for Apache Kafka for BigQuery API :target: https://pypi.org/project/google-cloud-managedkafka/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-managedkafka.svg :target: https://pypi.org/project/google-cloud-managedkafka/ -.. _Apache Kafka for BigQuery API: https://cloud.google.com/managed-kafka +.. _Managed Service for Apache Kafka: https://cloud.google.com/managed-kafka .. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest/summary_overview .. _Product Documentation: https://cloud.google.com/managed-kafka @@ -25,12 +25,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ -3. `Enable the Apache Kafka for BigQuery API.`_ +3. `Enable the Managed Service for Apache Kafka.`_ 4. `Setup Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Apache Kafka for BigQuery API.: https://cloud.google.com/managed-kafka +.. _Enable the Managed Service for Apache Kafka.: https://cloud.google.com/managed-kafka .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation @@ -97,12 +97,12 @@ Windows Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Apache Kafka for BigQuery API +- Read the `Client Library Documentation`_ for Managed Service for Apache Kafka to see other available methods on the client. -- Read the `Apache Kafka for BigQuery API Product documentation`_ to learn +- Read the `Managed Service for Apache Kafka Product documentation`_ to learn more about the product and see How-to Guides. - View this `README`_ to see the full list of Cloud APIs that we cover. -.. _Apache Kafka for BigQuery API Product documentation: https://cloud.google.com/managed-kafka +.. _Managed Service for Apache Kafka Product documentation: https://cloud.google.com/managed-kafka .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-managedkafka/docs/summary_overview.md b/packages/google-cloud-managedkafka/docs/summary_overview.md index 3f9a67edeb83..67991639a447 100644 --- a/packages/google-cloud-managedkafka/docs/summary_overview.md +++ b/packages/google-cloud-managedkafka/docs/summary_overview.md @@ -5,14 +5,14 @@ reverted. 
Instead, if you want to place additional content, create an pick up on the content and merge the content. ]: # -# Apache Kafka for BigQuery API API +# Managed Service for Apache Kafka API -Overview of the APIs available for Apache Kafka for BigQuery API API. +Overview of the APIs available for Managed Service for Apache Kafka API. ## All entries Classes, methods and properties & attributes for -Apache Kafka for BigQuery API API. +Managed Service for Apache Kafka API. [classes](https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest/summary_class.html) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/async_client.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/async_client.py index f6455789d762..3b94de06bd2b 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/async_client.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ManagedKafkaClient).get_transport_class, type(ManagedKafkaClient) - ) + get_transport_class = ManagedKafkaClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py index 380fcb863ab2..c6ef1b025a76 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py @@ -737,7 +737,7 @@ def __init__( transport_init: Union[ Type[ManagedKafkaTransport], Callable[..., ManagedKafkaTransport] ] = ( - type(self).get_transport_class(transport) + ManagedKafkaClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ManagedKafkaTransport], transport) ) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/resources.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/resources.py index 71a3ab46dc8e..ae837b7a76d3 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/resources.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/types/resources.py @@ -327,8 +327,8 @@ class ConsumerPartitionMetadata(proto.Message): Attributes: offset (int): - Required. The offset for this partition, or 0 - if no offset has been committed. + Required. The current offset for this + partition, or 0 if no offset has been committed. metadata (str): Optional. The associated metadata for this partition, or empty if it does not exist. diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json index d48b3688fd03..37b09a104f58 100644 --- a/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json +++ b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-managedkafka", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py index c530fe667d68..f176358747b4 100644 --- a/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py +++ b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py @@ -1281,22 +1281,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. 
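The managedkafka hunks above make the same simplification as the texttospeech clients earlier in this change: the async client now aliases the sync client's get_transport_class classmethod instead of rebinding it with functools.partial. A small illustration; the printed class name is the expected value, not verified here.

from google.cloud import managedkafka_v1

# get_transport_class is a plain classmethod on the sync client; the async
# client now reuses it directly.
transport_cls = managedkafka_v1.ManagedKafkaClient.get_transport_class("grpc_asyncio")
print(transport_cls.__name__)  # expected: ManagedKafkaGrpcAsyncIOTransport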
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1844,22 +1845,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2152,8 +2154,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2207,26 +2210,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2568,8 +2573,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2623,26 +2629,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2978,8 +2986,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3033,26 +3042,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3400,22 +3411,23 @@ async def test_list_topics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_topics - ] = mock_object + ] = mock_rpc request = {} await client.list_topics(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_topics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3962,22 +3974,23 @@ async def test_get_topic_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_topic - ] = mock_object + ] = mock_rpc request = {} await client.get_topic(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4334,22 +4347,23 @@ async def test_create_topic_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_topic - ] = mock_object + ] = mock_rpc request = {} await client.create_topic(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4720,22 +4734,23 @@ async def test_update_topic_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_topic - ] = mock_object + ] = mock_rpc request = {} await client.update_topic(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5087,22 +5102,23 @@ async def test_delete_topic_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_topic - ] = mock_object + ] = mock_rpc request = {} await client.delete_topic(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5456,22 +5472,23 @@ async def test_list_consumer_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_consumer_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_consumer_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_consumer_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6045,22 +6062,23 @@ async def test_get_consumer_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_consumer_group - ] = mock_object + ] = mock_rpc request = {} await client.get_consumer_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_consumer_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6428,22 +6446,23 @@ async def test_update_consumer_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_consumer_group - ] = mock_object + ] = mock_rpc request = {} await client.update_consumer_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_consumer_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6819,22 +6838,23 @@ async def test_delete_consumer_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_consumer_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_consumer_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_consumer_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 6572d4c55e9653fa5a5440c451a8ff1db16eb0e6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:19:09 -0400 Subject: [PATCH 011/108] chore: Update gapic-generator-python to v1.18.5 (#13007) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 662275444 Source-Link: https://github.com/googleapis/googleapis/commit/182e5df1cc85f74cba78c5d974eb1f77092d57a6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/961446bae76f94fd4b3f28136625237becbc00b0 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWZ1bmN0aW9ucy8uT3dsQm90LnlhbWwiLCJoIjoiOTYxNDQ2YmFlNzZmOTRmZDRiM2YyODEzNjYyNTIzN2JlY2JjMDBiMCJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/functions/gapic_version.py | 2 +- .../cloud/functions_v1/gapic_version.py | 2 +- .../cloud_functions_service/async_client.py | 6 +- .../cloud_functions_service/client.py | 2 +- .../cloud/functions_v2/gapic_version.py | 2 +- .../services/function_service/async_client.py | 5 +- .../services/function_service/client.py | 2 +- ...et_metadata_google.cloud.functions.v1.json | 2 +- ...et_metadata_google.cloud.functions.v2.json | 2 +- .../test_cloud_functions_service.py | 129 ++++++++++-------- .../functions_v2/test_function_service.py | 102 ++++++++------ 11 files changed, 140 insertions(+), 116 deletions(-) diff --git a/packages/google-cloud-functions/google/cloud/functions/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- a/packages/google-cloud-functions/google/cloud/functions/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py index d4eb36fa5491..256288870aac 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
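[Note on the test hunks above] Every `*_use_cached_wrapped_rpc` hunk in this regeneration applies the same mechanical change: the stand-in for the cached wrapped RPC is renamed from `mock_object` to `mock_rpc`, and its `return_value` is set explicitly to a plain `Mock` (presumably so that awaiting the `AsyncMock` yields a simple synchronous object rather than an auto-created child mock). The sketch below is a minimal, self-contained illustration of the call-count assertions these tests exercise; `FakeTransport` and `list_clusters` here are invented stand-ins, not the generated client code, which patches `client._client._transport._wrapped_methods` directly.

```python
# Minimal sketch of the cached-wrapped-RPC assertion pattern, assuming
# invented stand-in names (FakeTransport, list_clusters).
import asyncio
from unittest import mock


class FakeTransport:
    def __init__(self):
        # One cached wrapped callable per RPC, keyed by method name.
        self._wrapped_methods = {"list_clusters": None}


async def list_clusters(transport, request):
    # The caller always dispatches through the cached wrapped method.
    return await transport._wrapped_methods["list_clusters"](request)


async def main():
    transport = FakeTransport()

    # Replace the cached wrapped function with a mock, as the tests do.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    transport._wrapped_methods["list_clusters"] = mock_rpc

    await list_clusters(transport, {})
    assert mock_rpc.call_count == 1

    # A second call reuses the same cached wrapper; no new one is built.
    await list_clusters(transport, {})
    assert mock_rpc.call_count == 2


asyncio.run(main())
```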
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,10 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudFunctionsServiceClient).get_transport_class, - type(CloudFunctionsServiceClient), - ) + get_transport_class = CloudFunctionsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py index fa3d9453d9ef..5d636b642576 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py @@ -727,7 +727,7 @@ def __init__( Type[CloudFunctionsServiceTransport], Callable[..., CloudFunctionsServiceTransport], ] = ( - type(self).get_transport_class(transport) + CloudFunctionsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudFunctionsServiceTransport], transport) ) diff --git a/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/async_client.py b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/async_client.py index c4adbd7e07f0..59be40626e5b 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/async_client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
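[Note on the client diffs above] The generator change visible in each `async_client.py`/`client.py` pair replaces `functools.partial(type(SomeClient).get_transport_class, type(SomeClient))` with a plain `SomeClient.get_transport_class` reference, and `type(self).get_transport_class(transport)` with a direct call on the sync client class. In the generated clients `get_transport_class` is defined on a metaclass, so both spellings resolve the transport from the same registry. The sketch below uses invented names (`_ClientMeta`, `DemoClient`, `GrpcTransport`) to show why the two forms are interchangeable; it is not the generated code itself.

```python
# Hypothetical miniature of the GAPIC client/metaclass layout, showing that
# the old functools.partial spelling and the new plain attribute reference
# pick the same transport class.
import functools


class GrpcTransport:  # stand-in transport classes
    pass


class GrpcAsyncIOTransport:
    pass


class _ClientMeta(type):
    _transport_registry = {
        "grpc": GrpcTransport,
        "grpc_asyncio": GrpcAsyncIOTransport,
    }

    def get_transport_class(cls, label=None):
        # Return the registered transport, defaulting to the first entry.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class DemoClient(metaclass=_ClientMeta):
    pass


# Old generated spelling: bind the metaclass explicitly via functools.partial.
old_style = functools.partial(
    type(DemoClient).get_transport_class, type(DemoClient)
)
# New generated spelling: the method is already bound through the client class.
new_style = DemoClient.get_transport_class

assert old_style("grpc") is new_style("grpc") is GrpcTransport
assert old_style("grpc_asyncio") is new_style("grpc_asyncio")
```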
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -221,9 +220,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FunctionServiceClient).get_transport_class, type(FunctionServiceClient) - ) + get_transport_class = FunctionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py index 764eeb6d6903..c2b66bd32e37 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py @@ -877,7 +877,7 @@ def __init__( transport_init: Union[ Type[FunctionServiceTransport], Callable[..., FunctionServiceTransport] ] = ( - type(self).get_transport_class(transport) + FunctionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FunctionServiceTransport], transport) ) diff --git a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json index bb2afc167d50..3a16e5fca57c 100644 --- a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json +++ b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-functions", - "version": "1.17.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json index e455ac2a3bc1..0dbb0a2efac1 100644 --- a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json +++ b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-functions", - "version": "1.17.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py index 6f12c5294265..f3dea68fcc52 100644 --- a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py +++ b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py @@ -1375,22 +1375,23 @@ async def test_list_functions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_functions - ] = mock_object + ] = mock_rpc request = {} await client.list_functions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_functions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1925,22 +1926,23 @@ async def test_get_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_function - ] = mock_object + ] = mock_rpc request = {} await client.get_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2283,8 +2285,9 @@ def test_create_function_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_function(request) @@ -2338,26 +2341,28 @@ async def test_create_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_function - ] = mock_object + ] = mock_rpc request = {} await client.create_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2653,8 +2658,9 @@ def test_update_function_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_function(request) @@ -2708,26 +2714,28 @@ async def test_update_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_function - ] = mock_object + ] = mock_rpc request = {} await client.update_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3017,8 +3025,9 @@ def test_delete_function_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_function(request) @@ -3072,26 +3081,28 @@ async def test_delete_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_function - ] = mock_object + ] = mock_rpc request = {} await client.delete_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3445,22 +3456,23 @@ async def test_call_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.call_function - ] = mock_object + ] = mock_rpc request = {} await client.call_function(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.call_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3837,22 +3849,23 @@ async def test_generate_upload_url_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_upload_url - ] = mock_object + ] = mock_rpc request = {} await client.generate_upload_url(https://github.com/request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_upload_url(https://github.com/request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4138,22 +4151,23 @@ async def test_generate_download_url_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_download_url - ] = mock_object + ] = mock_rpc request = {} await client.generate_download_url(https://github.com/request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_download_url(https://github.com/request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4429,22 +4443,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4732,22 +4747,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5043,22 +5059,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py b/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py index 79d0ff632102..b21935b7b635 100644 --- a/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py +++ b/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py @@ -1342,22 +1342,23 @@ async def test_get_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_function - ] = mock_object + ] = mock_rpc request = {} await client.get_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1723,22 +1724,23 @@ async def test_list_functions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_functions - ] = mock_object + ] = mock_rpc request = {} await client.list_functions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_functions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2229,8 +2231,9 @@ def test_create_function_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_function(request) @@ -2284,26 +2287,28 @@ async def test_create_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_function - ] = mock_object + ] = mock_rpc request = {} await client.create_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2609,8 +2614,9 @@ def test_update_function_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_function(request) @@ -2664,26 +2670,28 @@ async def test_update_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_function - ] = mock_object + ] = mock_rpc request = {} await client.update_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,8 +2991,9 @@ def test_delete_function_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_function(request) @@ -3038,26 +3047,28 @@ async def test_delete_function_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_function - ] = mock_object + ] = mock_rpc request = {} await client.delete_function(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_function(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3417,22 +3428,23 @@ async def test_generate_upload_url_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_upload_url - ] = mock_object + ] = mock_rpc request = {} await client.generate_upload_url(https://github.com/request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_upload_url(https://github.com/request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3718,22 +3730,23 @@ async def test_generate_download_url_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_download_url - ] = mock_object + ] = mock_rpc request = {} await client.generate_download_url(https://github.com/request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_download_url(https://github.com/request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4003,22 +4016,23 @@ async def test_list_runtimes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_runtimes - ] = mock_object + ] = mock_rpc request = {} await client.list_runtimes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_runtimes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 5f179b98744808c33b07768f44efdfb3551fda03 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:27:33 -0400 Subject: [PATCH 012/108] docs: [google-cloud-batch] Batch CentOS images and HPC CentOS images are EOS (#13009) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE docs: Batch CentOS images and HPC CentOS images are EOS docs: Clarify required fields for Runnable.Container docs: Clarify required oneof fields for Runnable.Script docs: Clarify TaskSpec requires one or more runnables END_COMMIT_OVERRIDE PiperOrigin-RevId: 662665564 Source-Link: https://github.com/googleapis/googleapis/commit/2b7a9cbe5478aa896fac478aa996c6acf1a5306f Source-Link: https://github.com/googleapis/googleapis-gen/commit/b95b706c4c79db765c29b51a73840d5552afe3c5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiJiOTViNzA2YzRjNzlkYjc2NWMyOWI1MWE3Mzg0MGQ1NTUyYWZlM2M1In0= PiperOrigin-RevId: 662559250 Source-Link: https://github.com/googleapis/googleapis/commit/906736032699b7e943ef2155edbda05470723647 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e7287e10ac26decb4c72aace4adc27685d3ecd95 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiJlNzI4N2UxMGFjMjZkZWNiNGM3MmFhY2U0YWRjMjc2ODVkM2VjZDk1In0= --------- Co-authored-by: Owl Bot --- .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../services/batch_service/async_client.py | 5 +- .../batch_v1/services/batch_service/client.py | 2 +- .../google/cloud/batch_v1/types/job.py | 2 - .../google/cloud/batch_v1/types/task.py | 93 ++++++----- .../cloud/batch_v1alpha/gapic_version.py | 2 +- .../services/batch_service/async_client.py | 5 +- .../services/batch_service/client.py | 2 +- .../google/cloud/batch_v1alpha/types/job.py | 2 - .../google/cloud/batch_v1alpha/types/task.py | 93 ++++++----- ...nippet_metadata_google.cloud.batch.v1.json | 2 +- ...t_metadata_google.cloud.batch.v1alpha.json | 2 +- .../unit/gapic/batch_v1/test_batch_service.py | 64 ++++---- .../gapic/batch_v1alpha/test_batch_service.py | 147 ++++++++++-------- 15 files changed, 234 insertions(+), 191 deletions(-) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 889e09f4bc4c..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.24" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 889e09f4bc4c..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.24" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py index 6b16945ad4e3..4d01c36c456e 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BatchServiceClient).get_transport_class, type(BatchServiceClient) - ) + get_transport_class = BatchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py index 0079f78ef0a8..8d9d187805fd 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py @@ -718,7 +718,7 @@ def __init__( transport_init: Union[ Type[BatchServiceTransport], Callable[..., BatchServiceTransport] ] = ( - type(self).get_transport_class(transport) + BatchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BatchServiceTransport], transport) ) diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 6777c2c1b6f8..47abe1466463 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -549,9 +549,7 @@ class Disk(proto.Message): following image values are supported for a boot disk: - ``batch-debian``: use Batch Debian images. - - ``batch-centos``: use Batch CentOS images. - ``batch-cos``: use Batch Container-Optimized images. - - ``batch-hpc-centos``: use Batch HPC CentOS images. - ``batch-hpc-rocky``: use Batch HPC Rocky Linux images. This field is a member of `oneof`_ ``data_source``. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py index f431f02a9850..17aa644fd47a 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py @@ -301,32 +301,38 @@ class Container(proto.Message): Attributes: image_uri (str): - The URI to pull the container image from. + Required. The URI to pull the container image + from. commands (MutableSequence[str]): - Overrides the ``CMD`` specified in the container. If there - is an ENTRYPOINT (either in the container image or with the - entrypoint field below) then commands are appended as - arguments to the ENTRYPOINT. + Required for some container images. Overrides the ``CMD`` + specified in the container. If there is an ``ENTRYPOINT`` + (either in the container image or with the ``entrypoint`` + field below) then these commands are appended as arguments + to the ``ENTRYPOINT``. entrypoint (str): - Overrides the ``ENTRYPOINT`` specified in the container. 
+ Required for some container images. Overrides the + ``ENTRYPOINT`` specified in the container. volumes (MutableSequence[str]): Volumes to mount (bind mount) from the host machine files or - directories into the container, formatted to match docker - run's --volume option, e.g. /foo:/bar, or /foo:/bar:ro + directories into the container, formatted to match + ``--volume`` option for the ``docker run`` command—for + example, ``/foo:/bar`` or ``/foo:/bar:ro``. If the ``TaskSpec.Volumes`` field is specified but this field is not, Batch will mount each volume from the host machine to the container with the same mount path by default. In this case, the default mount option for - containers will be read-only (ro) for existing persistent - disks and read-write (rw) for other volume types, regardless - of the original mount options specified in + containers will be read-only (``ro``) for existing + persistent disks and read-write (``rw``) for other volume + types, regardless of the original mount options specified in ``TaskSpec.Volumes``. If you need different mount settings, you can explicitly configure them in this field. options (str): - Arbitrary additional options to include in - the "docker run" command when running this - container, e.g. "--network host". + Required for some container images. Arbitrary additional + options to include in the ``docker run`` command when + running this container—for example, ``--network host``. For + the ``--volume`` option, use the ``volumes`` field for the + container. block_external_network (bool): If set to true, external network access to and from container will be blocked, containers that are with @@ -443,28 +449,32 @@ class Script(proto.Message): Attributes: path (str): - Script file path on the host VM. - - To specify an interpreter, please add a - ``#!``\ (also known as `shebang - line `__) as - the first line of the file.(For example, to execute the - script using bash, ``#!/bin/bash`` should be the first line - of the file. To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3`` should be the first line of the - file.) Otherwise, the file will by default be executed by - ``/bin/sh``. + The path to a script file that is accessible from the host + VM(s). + + Unless the script file supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) as the + first line of the file. For example, to execute the script + using bash, include ``#!/bin/bash`` as the first line of the + file. Alternatively, to execute the script using Python3, + include ``#!/usr/bin/env python3`` as the first line of the + file. This field is a member of `oneof`_ ``command``. text (str): - Shell script text. - - To specify an interpreter, please add a - ``#!\n`` at the beginning of the text.(For - example, to execute the script using bash, ``#!/bin/bash\n`` - should be added. To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3\n`` should be added.) Otherwise, - the script will by default be executed by ``/bin/sh``. + The text for a script. + + Unless the script text supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) at the + beginning of the text. For example, to execute the script + using bash, include ``#!/bin/bash\n`` at the beginning of + the text. 
Alternatively, to execute the script using + Python3, include ``#!/usr/bin/env python3\n`` at the + beginning of the text. This field is a member of `oneof`_ ``command``. """ @@ -481,8 +491,9 @@ class Script(proto.Message): ) class Barrier(proto.Message): - r"""Barrier runnable blocks until all tasks in a taskgroup reach - it. + r"""A barrier runnable automatically blocks the execution of + subsequent runnables until all the tasks in the task group reach + the barrier. Attributes: name (str): @@ -552,11 +563,13 @@ class TaskSpec(proto.Message): Attributes: runnables (MutableSequence[google.cloud.batch_v1.types.Runnable]): - The sequence of scripts or containers to run for this Task. - Each Task using this TaskSpec executes its list of runnables - in order. The Task succeeds if all of its runnables either - exit with a zero status or any that exit with a non-zero - status have the ignore_exit_status flag. + Required. The sequence of one or more runnables (executable + scripts, executable containers, and/or barriers) for each + task in this task group to run. Each task runs this list of + runnables in order. For a task to succeed, all of its script + and container runnables each must either exit with a zero + status or enable the ``ignore_exit_status`` subfield and + exit with any status. Background runnables are killed automatically (if they have not already exited) a short time after all foreground diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 889e09f4bc4c..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.24" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py index e7ab4b27281f..e5fa2a62b8f5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
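[Note on the batch_v1 docstring diffs above] The reworded docstrings spell out the practical rules for Batch runnables: a container runnable requires at least `image_uri`, a script runnable needs a shebang line unless it relies on the default `/bin/sh` interpreter, a barrier blocks later runnables until all tasks reach it, and a `TaskSpec` must carry one or more runnables. Below is a hedged sketch that builds such a `TaskSpec` using only the fields named in this diff; the image and command values are placeholders, and no job is submitted.

```python
# Sketch: a TaskSpec that follows the documented requirements. Placeholder
# values throughout; this constructs request objects only.
from google.cloud import batch_v1

# Script runnable: the text starts with a shebang line because it targets
# bash rather than the default /bin/sh interpreter.
script_step = batch_v1.Runnable(
    script=batch_v1.Runnable.Script(text="#!/bin/bash\necho 'step one'\n")
)

# Barrier runnable: blocks subsequent runnables until every task reaches it.
sync_point = batch_v1.Runnable(
    barrier=batch_v1.Runnable.Barrier(name="wait-for-step-one")
)

# Container runnable: image_uri is required; entrypoint/commands only when
# the image needs them overridden (commands are appended to the entrypoint).
container_step = batch_v1.Runnable(
    container=batch_v1.Runnable.Container(
        image_uri="gcr.io/google-containers/busybox",  # placeholder image
        entrypoint="/bin/sh",
        commands=["-c", "echo 'step two'"],
    )
)

# TaskSpec requires one or more runnables; tasks run them in order.
task = batch_v1.TaskSpec(runnables=[script_step, sync_point, container_step])
print(task)
```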
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -210,9 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BatchServiceClient).get_transport_class, type(BatchServiceClient) - ) + get_transport_class = BatchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index bd4d3f87ca0d..1c35c5f30ac6 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -746,7 +746,7 @@ def __init__( transport_init: Union[ Type[BatchServiceTransport], Callable[..., BatchServiceTransport] ] = ( - type(self).get_transport_class(transport) + BatchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BatchServiceTransport], transport) ) diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 65660493a533..666e309ad00b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -669,9 +669,7 @@ class Disk(proto.Message): following image values are supported for a boot disk: - ``batch-debian``: use Batch Debian images. - - ``batch-centos``: use Batch CentOS images. - ``batch-cos``: use Batch Container-Optimized images. - - ``batch-hpc-centos``: use Batch HPC CentOS images. - ``batch-hpc-rocky``: use Batch HPC Rocky Linux images. This field is a member of `oneof`_ ``data_source``. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py index f854b36207a1..50619e3864b1 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py @@ -341,32 +341,38 @@ class Container(proto.Message): Attributes: image_uri (str): - The URI to pull the container image from. + Required. The URI to pull the container image + from. commands (MutableSequence[str]): - Overrides the ``CMD`` specified in the container. If there - is an ENTRYPOINT (either in the container image or with the - entrypoint field below) then commands are appended as - arguments to the ENTRYPOINT. + Required for some container images. Overrides the ``CMD`` + specified in the container. If there is an ``ENTRYPOINT`` + (either in the container image or with the ``entrypoint`` + field below) then these commands are appended as arguments + to the ``ENTRYPOINT``. entrypoint (str): - Overrides the ``ENTRYPOINT`` specified in the container. + Required for some container images. Overrides the + ``ENTRYPOINT`` specified in the container. volumes (MutableSequence[str]): Volumes to mount (bind mount) from the host machine files or - directories into the container, formatted to match docker - run's --volume option, e.g. /foo:/bar, or /foo:/bar:ro + directories into the container, formatted to match + ``--volume`` option for the ``docker run`` command—for + example, ``/foo:/bar`` or ``/foo:/bar:ro``. 
If the ``TaskSpec.Volumes`` field is specified but this field is not, Batch will mount each volume from the host machine to the container with the same mount path by default. In this case, the default mount option for - containers will be read-only (ro) for existing persistent - disks and read-write (rw) for other volume types, regardless - of the original mount options specified in + containers will be read-only (``ro``) for existing + persistent disks and read-write (``rw``) for other volume + types, regardless of the original mount options specified in ``TaskSpec.Volumes``. If you need different mount settings, you can explicitly configure them in this field. options (str): - Arbitrary additional options to include in - the "docker run" command when running this - container, e.g. "--network host". + Required for some container images. Arbitrary additional + options to include in the ``docker run`` command when + running this container—for example, ``--network host``. For + the ``--volume`` option, use the ``volumes`` field for the + container. block_external_network (bool): If set to true, external network access to and from container will be blocked, containers that are with @@ -483,28 +489,32 @@ class Script(proto.Message): Attributes: path (str): - Script file path on the host VM. - - To specify an interpreter, please add a - ``#!``\ (also known as `shebang - line `__) as - the first line of the file.(For example, to execute the - script using bash, ``#!/bin/bash`` should be the first line - of the file. To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3`` should be the first line of the - file.) Otherwise, the file will by default be executed by - ``/bin/sh``. + The path to a script file that is accessible from the host + VM(s). + + Unless the script file supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) as the + first line of the file. For example, to execute the script + using bash, include ``#!/bin/bash`` as the first line of the + file. Alternatively, to execute the script using Python3, + include ``#!/usr/bin/env python3`` as the first line of the + file. This field is a member of `oneof`_ ``command``. text (str): - Shell script text. - - To specify an interpreter, please add a - ``#!\n`` at the beginning of the text.(For - example, to execute the script using bash, ``#!/bin/bash\n`` - should be added. To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3\n`` should be added.) Otherwise, - the script will by default be executed by ``/bin/sh``. + The text for a script. + + Unless the script text supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) at the + beginning of the text. For example, to execute the script + using bash, include ``#!/bin/bash\n`` at the beginning of + the text. Alternatively, to execute the script using + Python3, include ``#!/usr/bin/env python3\n`` at the + beginning of the text. This field is a member of `oneof`_ ``command``. """ @@ -521,8 +531,9 @@ class Script(proto.Message): ) class Barrier(proto.Message): - r"""Barrier runnable blocks until all tasks in a taskgroup reach - it. + r"""A barrier runnable automatically blocks the execution of + subsequent runnables until all the tasks in the task group reach + the barrier. 
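The container docstring changes above read more easily next to an example. A minimal sketch, again assuming google-cloud-batch; the image URI, mount paths, and flags are placeholders chosen to echo the ``--volume`` and ``--network host`` examples in the revised text.

from google.cloud import batch_v1

# Container runnable: bind mounts go in `volumes` (docker --volume syntax,
# host_path:container_path[:options]); other docker run flags go in `options`.
container = batch_v1.Runnable.Container(
    image_uri="gcr.io/example-project/worker:latest",  # placeholder image
    entrypoint="/bin/sh",
    commands=["-c", "ls /input"],
    volumes=["/mnt/disks/data:/input:ro"],  # read-only bind mount
    options="--network host",  # keep --volume mounts in the volumes field, not here
)
runnable = batch_v1.Runnable(container=container)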
Attributes: name (str): @@ -592,11 +603,13 @@ class TaskSpec(proto.Message): Attributes: runnables (MutableSequence[google.cloud.batch_v1alpha.types.Runnable]): - The sequence of scripts or containers to run for this Task. - Each Task using this TaskSpec executes its list of runnables - in order. The Task succeeds if all of its runnables either - exit with a zero status or any that exit with a non-zero - status have the ignore_exit_status flag. + Required. The sequence of one or more runnables (executable + scripts, executable containers, and/or barriers) for each + task in this task group to run. Each task runs this list of + runnables in order. For a task to succeed, all of its script + and container runnables each must either exit with a zero + status or enable the ``ignore_exit_status`` subfield and + exit with any status. Background runnables are killed automatically (if they have not already exited) a short time after all foreground diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 5b6a86a2dac4..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.24" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index f9f1b4e652fd..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.24" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 04b87e270586..3a3cc5d39a91 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -1283,22 +1283,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. 
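The TaskSpec and Barrier wording updated above can be summarized in one small sketch (batch_v1 shown; the v1alpha messages in this file have the same shape). The script contents and barrier name are placeholders.

from google.cloud import batch_v1

task_spec = batch_v1.TaskSpec(
    runnables=[
        # A non-zero exit here does not fail the task, because
        # ignore_exit_status is enabled (see the runnables docstring above).
        batch_v1.Runnable(
            script=batch_v1.Runnable.Script(text="#!/bin/bash\n./optional-warmup.sh\n"),
            ignore_exit_status=True,
        ),
        # Every task in the group must reach this barrier before any task
        # moves on to the runnable that follows it.
        batch_v1.Runnable(barrier=batch_v1.Runnable.Barrier(name="wait-for-warmup")),
        batch_v1.Runnable(
            script=batch_v1.Runnable.Script(text="#!/bin/bash\n./main-work.sh\n"),
        ),
    ],
)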
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1671,22 +1672,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1983,8 +1985,9 @@ def test_delete_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_job(request) @@ -2036,26 +2039,28 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2408,22 +2413,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
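The rename applied throughout these generated tests (mock_object to mock_rpc, plus an explicit return_value) does not change the shape of the check: replace the cached wrapped RPC with a mock, call the method twice, and confirm the wrapper factory is not invoked again. A stripped-down, hypothetical version of that flow, independent of any real transport:

from unittest import mock

# Stand-ins for client._transport._wrapped_methods and the wrapper factory.
wrapped_methods = {}
wrapper_fn = mock.Mock(side_effect=lambda fn: fn)

def call_rpc(name, fn):
    # Wrap and cache on first use; reuse the cached wrapper afterwards.
    if name not in wrapped_methods:
        wrapped_methods[name] = wrapper_fn(fn)
    return wrapped_methods[name]()

mock_rpc = mock.Mock(return_value=mock.Mock())
wrapped_methods["create_job"] = mock_rpc  # pre-seed the cache, as the tests do

call_rpc("create_job", lambda: None)
assert mock_rpc.call_count == 1

call_rpc("create_job", lambda: None)
assert wrapper_fn.call_count == 0  # no new wrapper was built for the second call
assert mock_rpc.call_count == 2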
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2966,22 +2972,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3331,22 +3338,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index a25909de20f8..8af9c5d104a1 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -1295,22 +1295,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2004,8 +2006,9 @@ def test_delete_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_job(request) @@ -2057,26 +2060,28 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2366,8 +2371,9 @@ def test_cancel_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.cancel_job(request) @@ -2419,26 +2425,28 @@ async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2789,22 +2797,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3174,22 +3183,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3732,22 +3742,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4099,22 +4110,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4677,22 +4689,23 @@ async def test_create_resource_allowance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_resource_allowance - ] = mock_object + ] = mock_rpc request = {} await client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5125,22 +5138,23 @@ async def test_get_resource_allowance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_resource_allowance - ] = mock_object + ] = mock_rpc request = {} await client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5458,8 +5472,9 @@ def test_delete_resource_allowance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_resource_allowance(request) @@ -5515,26 +5530,28 @@ async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_resource_allowance - ] = mock_object + ] = mock_rpc request = {} await client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5908,22 +5925,23 @@ async def test_list_resource_allowances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_resource_allowances - ] = mock_object + ] = mock_rpc request = {} await client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_resource_allowances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6502,22 +6520,23 @@ async def test_update_resource_allowance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_resource_allowance - ] = mock_object + ] = mock_rpc request = {} await client.update_resource_allowance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 9c95cc22c6fa8189233f7f2c355beef01019dccf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:44:56 -0400 Subject: [PATCH 013/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#12996) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwcGVuZ2luZS1sb2dnaW5nLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwcGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFydGlmYWN0LXJlZ2lzdHJ5Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFzc2V0Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFzc3VyZWQtd29ya2xvYWRzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWF1dG9tbC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhY2t1cGRyLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhcmUtbWV0YWwtc29sdXRpb24vLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtYXBwY29ubmVjdGlvbnMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtYXBwY29ubmVjdG9ycy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtYXBwZ2F0ZXdheXMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtY2xpZW50Y29ubmVjdG9yc2VydmljZXMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJleW9uZGNvcnAtY2xpZW50Z2F0ZXdheXMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWFuYWx5dGljc2h1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWJpZ2xha2UvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWNvbm5lY3Rpb24vLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGEtZXhjaGFuZ2UvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGFwb2xpY2llcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGF0cmFuc2Zlci8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky Co-authored-by: Anthonios Partheniou --- .../cloud/appengine_logging/gapic_version.py | 2 +- 
.../appengine_logging_v1/gapic_version.py | 2 +- .../google/cloud/apphub/gapic_version.py | 2 +- .../google/cloud/apphub_v1/gapic_version.py | 2 +- .../services/app_hub/async_client.py | 5 +- .../apphub_v1/services/app_hub/client.py | 2 +- ...ippet_metadata_google.cloud.apphub.v1.json | 2 +- .../unit/gapic/apphub_v1/test_app_hub.py | 353 ++++++----- .../cloud/artifactregistry/gapic_version.py | 2 +- .../artifactregistry_v1/gapic_version.py | 2 +- .../artifact_registry/async_client.py | 5 +- .../services/artifact_registry/client.py | 2 +- .../artifactregistry_v1beta2/gapic_version.py | 2 +- .../artifact_registry/async_client.py | 5 +- .../services/artifact_registry/client.py | 2 +- ...a_google.devtools.artifactregistry.v1.json | 2 +- ...gle.devtools.artifactregistry.v1beta2.json | 2 +- .../test_artifact_registry.py | 394 +++++++------ .../test_artifact_registry.py | 285 +++++---- .../google/cloud/asset/gapic_version.py | 2 +- .../google/cloud/asset_v1/gapic_version.py | 2 +- .../services/asset_service/async_client.py | 5 +- .../asset_v1/services/asset_service/client.py | 2 +- .../cloud/asset_v1p1beta1/gapic_version.py | 2 +- .../services/asset_service/async_client.py | 5 +- .../services/asset_service/client.py | 2 +- .../cloud/asset_v1p2beta1/gapic_version.py | 2 +- .../services/asset_service/async_client.py | 5 +- .../services/asset_service/client.py | 2 +- .../cloud/asset_v1p5beta1/gapic_version.py | 2 +- .../services/asset_service/async_client.py | 5 +- .../services/asset_service/client.py | 2 +- ...nippet_metadata_google.cloud.asset.v1.json | 2 +- ...metadata_google.cloud.asset.v1p1beta1.json | 2 +- ...metadata_google.cloud.asset.v1p2beta1.json | 2 +- ...metadata_google.cloud.asset.v1p5beta1.json | 2 +- .../unit/gapic/asset_v1/test_asset_service.py | 227 +++---- .../asset_v1p1beta1/test_asset_service.py | 18 +- .../asset_v1p2beta1/test_asset_service.py | 45 +- .../asset_v1p5beta1/test_asset_service.py | 9 +- .../cloud/assuredworkloads/gapic_version.py | 2 +- .../assuredworkloads_v1/gapic_version.py | 2 +- .../assured_workloads_service/async_client.py | 6 +- .../assured_workloads_service/client.py | 2 +- .../assuredworkloads_v1beta1/gapic_version.py | 2 +- .../assured_workloads_service/async_client.py | 6 +- .../assured_workloads_service/client.py | 2 +- ...data_google.cloud.assuredworkloads.v1.json | 2 +- ...google.cloud.assuredworkloads.v1beta1.json | 2 +- .../test_assured_workloads_service.py | 91 +-- .../test_assured_workloads_service.py | 73 ++- .../google/cloud/automl/gapic_version.py | 2 +- .../google/cloud/automl_v1/gapic_version.py | 2 +- .../services/auto_ml/async_client.py | 5 +- .../automl_v1/services/auto_ml/client.py | 2 +- .../prediction_service/async_client.py | 5 +- .../services/prediction_service/client.py | 2 +- .../cloud/automl_v1beta1/gapic_version.py | 2 +- .../services/auto_ml/async_client.py | 5 +- .../automl_v1beta1/services/auto_ml/client.py | 2 +- .../prediction_service/async_client.py | 5 +- .../services/prediction_service/client.py | 2 +- ...ippet_metadata_google.cloud.automl.v1.json | 2 +- ..._metadata_google.cloud.automl.v1beta1.json | 2 +- .../unit/gapic/automl_v1/test_auto_ml.py | 252 ++++---- .../automl_v1/test_prediction_service.py | 28 +- .../unit/gapic/automl_v1beta1/test_auto_ml.py | 306 +++++----- .../automl_v1beta1/test_prediction_service.py | 28 +- .../google/cloud/backupdr/gapic_version.py | 2 +- .../google/cloud/backupdr_v1/gapic_version.py | 2 +- .../services/backup_dr/async_client.py | 5 +- .../backupdr_v1/services/backup_dr/client.py | 2 +- 
...pet_metadata_google.cloud.backupdr.v1.json | 2 +- .../unit/gapic/backupdr_v1/test_backup_dr.py | 56 +- .../bare_metal_solution/gapic_version.py | 2 +- .../bare_metal_solution_v2/gapic_version.py | 2 +- .../bare_metal_solution/async_client.py | 5 +- .../services/bare_metal_solution/client.py | 2 +- ...ata_google.cloud.baremetalsolution.v2.json | 2 +- .../test_bare_metal_solution.py | 556 ++++++++++-------- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../app_connections_service/async_client.py | 6 +- .../app_connections_service/client.py | 2 +- ...le.cloud.beyondcorp.appconnections.v1.json | 2 +- .../test_app_connections_service.py | 84 +-- .../beyondcorp_appconnectors/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../app_connectors_service/async_client.py | 6 +- .../services/app_connectors_service/client.py | 2 +- ...gle.cloud.beyondcorp.appconnectors.v1.json | 2 +- .../test_app_connectors_service.py | 94 +-- .../beyondcorp_appgateways/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../app_gateways_service/async_client.py | 6 +- .../services/app_gateways_service/client.py | 2 +- ...oogle.cloud.beyondcorp.appgateways.v1.json | 2 +- .../test_app_gateways_service.py | 56 +- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- ...beyondcorp.clientconnectorservices.v1.json | 2 +- .../test_client_connector_services_service.py | 75 ++- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../client_gateways_service/async_client.py | 6 +- .../client_gateways_service/client.py | 2 +- ...le.cloud.beyondcorp.clientgateways.v1.json | 2 +- .../test_client_gateways_service.py | 56 +- .../bigquery_analyticshub/gapic_version.py | 2 +- .../bigquery_analyticshub_v1/gapic_version.py | 2 +- .../analytics_hub_service/async_client.py | 6 +- .../services/analytics_hub_service/client.py | 2 +- ...google.cloud.bigquery.analyticshub.v1.json | 2 +- .../test_analytics_hub_service.py | 228 +++---- .../cloud/bigquery_biglake/gapic_version.py | 2 +- .../bigquery_biglake_v1/gapic_version.py | 2 +- .../metastore_service/async_client.py | 5 +- .../services/metastore_service/client.py | 2 +- .../gapic_version.py | 2 +- .../metastore_service/async_client.py | 5 +- .../services/metastore_service/client.py | 2 +- ...data_google.cloud.bigquery.biglake.v1.json | 2 +- ...oogle.cloud.bigquery.biglake.v1alpha1.json | 2 +- .../test_metastore_service.py | 135 +++-- .../test_metastore_service.py | 171 +++--- .../bigquery_connection/gapic_version.py | 2 +- .../bigquery_connection_v1/gapic_version.py | 2 +- .../connection_service/async_client.py | 5 +- .../services/connection_service/client.py | 2 +- ...a_google.cloud.bigquery.connection.v1.json | 2 +- .../test_connection_service.py | 72 ++- .../bigquery_data_exchange/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../analytics_hub_service/async_client.py | 6 +- .../services/analytics_hub_service/client.py | 2 +- ...e.cloud.bigquery.dataexchange.v1beta1.json | 2 +- .../test_analytics_hub_service.py | 135 +++-- .../bigquery_datapolicies/gapic_version.py | 2 +- .../bigquery_datapolicies_v1/gapic_version.py | 2 +- .../data_policy_service/async_client.py | 5 +- .../services/data_policy_service/client.py | 2 +- .../gapic_version.py | 2 +- .../data_policy_service/async_client.py | 5 +- .../services/data_policy_service/client.py | 2 +- ...google.cloud.bigquery.datapolicies.v1.json | 2 +- ...e.cloud.bigquery.datapolicies.v1beta1.json | 2 +- .../test_data_policy_service.py | 81 +-- .../test_data_policy_service.py 
| 72 ++- .../bigquery_datatransfer/gapic_version.py | 2 +- .../bigquery_datatransfer_v1/gapic_version.py | 2 +- .../data_transfer_service/async_client.py | 6 +- .../services/data_transfer_service/client.py | 2 +- ...google.cloud.bigquery.datatransfer.v1.json | 2 +- .../test_data_transfer_service.py | 144 +++-- 156 files changed, 2456 insertions(+), 2018 deletions(-) diff --git a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py +++ b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py +++ b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py +++ b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py index 6893ded4c86a..75e2df6b93de 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,9 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppHubClient).get_transport_class, type(AppHubClient) - ) + get_transport_class = AppHubClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py index d0ade49a61d7..970d8975a18c 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py @@ -792,7 +792,7 @@ def __init__( transport_init: Union[ Type[AppHubTransport], Callable[..., AppHubTransport] ] = ( - type(self).get_transport_class(transport) + AppHubClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppHubTransport], transport) ) diff --git a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json index 4d9af8cc701f..f29a5f624a1e 100644 --- a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json +++ b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apphub", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py index 7eb90552f015..0cc2bf6405f1 100644 --- a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py +++ b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py @@ -1249,22 +1249,23 @@ async def test_lookup_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.lookup_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_list_service_project_attachments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_project_attachments - ] = mock_object + ] = mock_rpc request = {} await client.list_service_project_attachments(request) # Establish that the underlying gRPC stub method was called. 
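The get_transport_class simplification repeated across this patch (BatchServiceClient earlier, AppHubClient here) swaps a functools.partial construction for a direct classmethod reference. A toy model of why the two are interchangeable, using a made-up metaclass and registry rather than the real generated ones:

import functools

class _ClientMeta(type):
    # Placeholder for the generated *ClientMeta transport registry.
    _transport_registry = {"grpc": "GrpcTransport", "rest": "RestTransport"}

    def get_transport_class(cls, label=None):
        return cls._transport_registry[label or "grpc"]

class Client(metaclass=_ClientMeta):
    pass

old_style = functools.partial(type(Client).get_transport_class, type(Client))
new_style = Client.get_transport_class

# Both forms end up calling the metaclass method against the same registry.
assert old_style("rest") == new_style("rest") == "RestTransport"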
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_project_attachments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2189,8 +2191,9 @@ def test_create_service_project_attachment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service_project_attachment(request) @@ -2246,26 +2249,28 @@ async def test_create_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.create_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,22 +2684,23 @@ async def test_get_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.get_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3020,8 +3026,9 @@ def test_delete_service_project_attachment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service_project_attachment(request) @@ -3077,26 +3084,28 @@ async def test_delete_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.delete_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3461,22 +3470,23 @@ async def test_detach_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detach_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.detach_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detach_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3855,22 +3865,23 @@ async def test_list_discovered_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_discovered_services - ] = mock_object + ] = mock_rpc request = {} await client.list_discovered_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_discovered_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4449,22 +4460,23 @@ async def test_get_discovered_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_discovered_service - ] = mock_object + ] = mock_rpc request = {} await client.get_discovered_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_discovered_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4834,22 +4846,23 @@ async def test_lookup_discovered_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_discovered_service - ] = mock_object + ] = mock_rpc request = {} await client.lookup_discovered_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_discovered_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5225,22 +5238,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5733,8 +5747,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -5788,26 +5803,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6188,22 +6205,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6502,8 +6520,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -6557,26 +6576,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6878,8 +6899,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -6933,26 +6955,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7320,22 +7344,23 @@ async def test_list_discovered_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_discovered_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_discovered_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_discovered_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7914,22 +7939,23 @@ async def test_get_discovered_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_discovered_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_discovered_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_discovered_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8299,22 +8325,23 @@ async def test_lookup_discovered_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_discovered_workload - ] = mock_object + ] = mock_rpc request = {} await client.lookup_discovered_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_discovered_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8690,22 +8717,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9198,8 +9226,9 @@ def test_create_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workload(request) @@ -9253,26 +9282,28 @@ async def test_create_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workload - ] = mock_object + ] = mock_rpc request = {} await client.create_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9653,22 +9684,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9967,8 +9999,9 @@ def test_update_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
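# --- Illustrative sketch (hedged): the async *_use_cached_wrapped_rpc tests above
# all follow the same shape -- swap the entry in the transport's _wrapped_methods
# dict for a mock.AsyncMock whose return_value is a plain mock.Mock, then count
# calls. The FakeTransport / FakeAsyncClient names below are hypothetical
# stand-ins for the generated classes; the only behaviour relied on is that
# awaiting an AsyncMock call yields its configured return_value.
import asyncio
from unittest import mock


class FakeTransport:
    def __init__(self):
        # Generated transports keep one wrapped callable per RPC name.
        self._wrapped_methods = {"get_service": mock.AsyncMock()}


class FakeAsyncClient:
    def __init__(self):
        self._transport = FakeTransport()

    async def get_service(self, request):
        # Dispatch through the cached wrapper, as the generated clients do.
        return await self._transport._wrapped_methods["get_service"](request)


async def _demo():
    client = FakeAsyncClient()

    # Replace the cached wrapped function with a mock (mirrors the tests).
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    client._transport._wrapped_methods["get_service"] = mock_rpc

    response = await client.get_service({})
    assert mock_rpc.call_count == 1
    # Awaiting the AsyncMock resolves to the plain Mock configured above.
    assert response is mock_rpc.return_value

    await client.get_service({})
    assert mock_rpc.call_count == 2


asyncio.run(_demo())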
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workload(request) @@ -10022,26 +10055,28 @@ async def test_update_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workload - ] = mock_object + ] = mock_rpc request = {} await client.update_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10343,8 +10378,9 @@ def test_delete_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workload(request) @@ -10398,26 +10434,28 @@ async def test_delete_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workload - ] = mock_object + ] = mock_rpc request = {} await client.delete_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10782,22 +10820,23 @@ async def test_list_applications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_applications - ] = mock_object + ] = mock_rpc request = {} await client.list_applications(request) # Establish that the underlying gRPC stub method was called. 
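# --- Illustrative sketch (hedged): the "Operation methods call wrapper_fn to
# build a cached client._transport.operations_client instance on first rpc
# call" comments above describe a build-once, reuse-afterwards helper. A
# minimal model of that behaviour, with hypothetical names
# (FakeOperationsTransport, make_operations_client), is:
from unittest import mock


class FakeOperationsTransport:
    def __init__(self, factory):
        self._factory = factory
        self._operations_client = None

    @property
    def operations_client(self):
        # Built lazily on first access, then served from the cache.
        if self._operations_client is None:
            self._operations_client = self._factory()
        return self._operations_client


factory = mock.Mock(name="make_operations_client")
transport = FakeOperationsTransport(factory)

first = transport.operations_client   # factory runs here
second = transport.operations_client  # cached instance is reused
assert factory.call_count == 1
assert first is second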
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_applications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11318,8 +11357,9 @@ def test_create_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application(request) @@ -11375,26 +11415,28 @@ async def test_create_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application - ] = mock_object + ] = mock_rpc request = {} await client.create_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11783,22 +11825,23 @@ async def test_get_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_application - ] = mock_object + ] = mock_rpc request = {} await client.get_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12109,8 +12152,9 @@ def test_update_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application(request) @@ -12166,26 +12210,28 @@ async def test_update_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application - ] = mock_object + ] = mock_rpc request = {} await client.update_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12508,8 +12554,9 @@ def test_delete_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_application(request) @@ -12565,26 +12612,28 @@ async def test_delete_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_application - ] = mock_object + ] = mock_rpc request = {} await client.delete_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py index 3327d5cf80fa..f12091b471e7 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -262,9 +261,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ArtifactRegistryClient).get_transport_class, type(ArtifactRegistryClient) - ) + get_transport_class = ArtifactRegistryClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py index 1dd15dc4327c..76117933a9b3 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py @@ -998,7 +998,7 @@ def __init__( Type[ArtifactRegistryTransport], Callable[..., ArtifactRegistryTransport], ] = ( - type(self).get_transport_class(transport) + ArtifactRegistryClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ArtifactRegistryTransport], transport) ) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py index 54aab92c1222..53dbf2782f4f 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -234,9 +233,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ArtifactRegistryClient).get_transport_class, type(ArtifactRegistryClient) - ) + get_transport_class = ArtifactRegistryClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py index f52a3852aa0d..c11270d9ed9f 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py @@ -833,7 +833,7 @@ def __init__( Type[ArtifactRegistryTransport], Callable[..., ArtifactRegistryTransport], ] = ( - type(self).get_transport_class(transport) + ArtifactRegistryClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ArtifactRegistryTransport], transport) ) diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json index deb29f1cf87e..b27db1a9e71b 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json index 49de6b697aa4..c22cba2a0008 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py 
b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py index 8c9d44c56e33..fb88718b7915 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py @@ -1359,22 +1359,23 @@ async def test_list_docker_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_docker_images - ] = mock_object + ] = mock_rpc request = {} await client.list_docker_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_docker_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1949,22 +1950,23 @@ async def test_get_docker_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_docker_image - ] = mock_object + ] = mock_rpc request = {} await client.get_docker_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_docker_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2335,22 +2337,23 @@ async def test_list_maven_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_maven_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.list_maven_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_maven_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2935,22 +2938,23 @@ async def test_get_maven_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_maven_artifact - ] = mock_object + ] = mock_rpc request = {} await client.get_maven_artifact(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_maven_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3329,22 +3333,23 @@ async def test_list_npm_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_npm_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_npm_packages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_npm_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3914,22 +3919,23 @@ async def test_get_npm_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_npm_package - ] = mock_object + ] = mock_rpc request = {} await client.get_npm_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_npm_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4294,22 +4300,23 @@ async def test_list_python_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_python_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_python_packages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_python_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4891,22 +4898,23 @@ async def test_get_python_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_python_package - ] = mock_object + ] = mock_rpc request = {} await client.get_python_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_python_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5225,8 +5233,9 @@ def test_import_apt_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_apt_artifacts(request) @@ -5282,26 +5291,28 @@ async def test_import_apt_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_apt_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_apt_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_apt_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5525,8 +5536,9 @@ def test_import_yum_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_yum_artifacts(request) @@ -5582,26 +5594,28 @@ async def test_import_yum_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_yum_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_yum_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_yum_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5883,22 +5897,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6480,22 +6495,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6812,8 +6828,9 @@ def test_create_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_repository(request) @@ -6869,26 +6886,28 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7315,22 +7334,23 @@ async def test_update_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_repository - ] = mock_object + ] = mock_rpc request = {} await client.update_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7689,8 +7709,9 @@ def test_delete_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_repository(request) @@ -7746,26 +7767,28 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8123,22 +8146,23 @@ async def test_list_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_packages(request) # Establish that the underlying gRPC stub method was called. 
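# --- Illustrative sketch (hedged): the `assert wrapper_fn.call_count == 0`
# checks in the tests above verify that the per-RPC wrapper is built once and
# then served from a cache. A minimal model of that check, with a hypothetical
# FakeCachingTransport and a mock standing in for the wrapper factory:
from unittest import mock

wrapper_fn = mock.Mock(side_effect=lambda func: func)  # "wraps" by returning func


class FakeCachingTransport:
    def __init__(self):
        self._wrapped_methods = {}

    def call(self, name, func, request):
        # Build and cache the wrapped callable on first use only.
        if name not in self._wrapped_methods:
            self._wrapped_methods[name] = wrapper_fn(func)
        return self._wrapped_methods[name](request)


transport = FakeCachingTransport()
rpc = mock.Mock(return_value="response")

transport.call("list_packages", rpc, {})   # first call builds the wrapper
wrapper_fn.reset_mock()
transport.call("list_packages", rpc, {})   # second call hits the cache

assert wrapper_fn.call_count == 0  # no new wrapper was created
assert rpc.call_count == 2         # the underlying RPC ran both times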
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8684,22 +8708,23 @@ async def test_get_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_package - ] = mock_object + ] = mock_rpc request = {} await client.get_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8990,8 +9015,9 @@ def test_delete_package_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_package(request) @@ -9045,26 +9071,28 @@ async def test_delete_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_package - ] = mock_object + ] = mock_rpc request = {} await client.delete_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9414,22 +9442,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9975,22 +10004,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10281,8 +10311,9 @@ def test_delete_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_version(request) @@ -10336,26 +10367,28 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10656,8 +10689,9 @@ def test_batch_delete_versions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_versions(request) @@ -10713,26 +10747,28 @@ async def test_batch_delete_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_versions - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11102,22 +11138,23 @@ async def test_list_files_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_files - ] = mock_object + ] = mock_rpc request = {} await client.list_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11664,22 +11701,23 @@ async def test_get_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_file - ] = mock_object + ] = mock_rpc request = {} await client.get_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12030,22 +12068,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12589,22 +12628,23 @@ async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12954,22 +12994,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13333,22 +13374,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13696,22 +13738,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14054,22 +14097,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14357,22 +14401,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14668,22 +14713,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14994,22 +15040,23 @@ async def test_get_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_project_settings(request) # Establish that the underlying gRPC stub method was called. 
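# --- Illustrative sketch (hedged): the @pytest.mark.asyncio markers scattered
# through these hunks come from pytest-asyncio, which runs coroutine test
# functions on an event loop. A standalone test in that style, using
# AsyncMock's await-tracking helpers (await_count / assert_awaited), might look
# like the following; fake_rpc and test_fake_rpc_awaited are hypothetical names.
import pytest
from unittest import mock


@pytest.mark.asyncio
async def test_fake_rpc_awaited():
    fake_rpc = mock.AsyncMock(return_value=mock.Mock())

    await fake_rpc({})
    await fake_rpc({})

    fake_rpc.assert_awaited()          # awaited at least once
    assert fake_rpc.await_count == 2   # and exactly twice here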
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15388,22 +15435,23 @@ async def test_update_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_project_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15782,22 +15830,23 @@ async def test_get_vpcsc_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vpcsc_config - ] = mock_object + ] = mock_rpc request = {} await client.get_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vpcsc_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16159,22 +16208,23 @@ async def test_update_vpcsc_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_vpcsc_config - ] = mock_object + ] = mock_rpc request = {} await client.update_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_vpcsc_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py index 7399d56bf20a..c4d1aa4088c3 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py @@ -1293,8 +1293,9 @@ def test_import_apt_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_apt_artifacts(request) @@ -1350,26 +1351,28 @@ async def test_import_apt_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_apt_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_apt_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_apt_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1593,8 +1596,9 @@ def test_import_yum_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_yum_artifacts(request) @@ -1650,26 +1654,28 @@ async def test_import_yum_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_yum_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_yum_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_yum_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1951,22 +1957,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2536,22 +2543,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2860,8 +2868,9 @@ def test_create_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_repository(request) @@ -2917,26 +2926,28 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3351,22 +3362,23 @@ async def test_update_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_repository - ] = mock_object + ] = mock_rpc request = {} await client.update_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3717,8 +3729,9 @@ def test_delete_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_repository(request) @@ -3774,26 +3787,28 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4151,22 +4166,23 @@ async def test_list_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_packages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4712,22 +4728,23 @@ async def test_get_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_package - ] = mock_object + ] = mock_rpc request = {} await client.get_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5018,8 +5035,9 @@ def test_delete_package_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_package(request) @@ -5073,26 +5091,28 @@ async def test_delete_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_package - ] = mock_object + ] = mock_rpc request = {} await client.delete_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5442,22 +5462,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6003,22 +6024,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6309,8 +6331,9 @@ def test_delete_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_version(request) @@ -6364,26 +6387,28 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6731,22 +6756,23 @@ async def test_list_files_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_files - ] = mock_object + ] = mock_rpc request = {} await client.list_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7293,22 +7319,23 @@ async def test_get_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_file - ] = mock_object + ] = mock_rpc request = {} await client.get_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7659,22 +7686,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8218,22 +8246,23 @@ async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8583,22 +8612,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8962,22 +8992,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9325,22 +9356,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9683,22 +9715,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9986,22 +10019,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10297,22 +10331,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10623,22 +10658,23 @@ async def test_get_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_project_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11017,22 +11053,23 @@ async def test_update_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_project_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 83c98d26518e..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 83c98d26518e..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py index f520a1810ce5..31793fef2a14 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py index 5ba73fd7b766..423f9eec50b8 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py @@ -765,7 +765,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 83c98d26518e..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py index 5c0be34f37b1..2448592a9eda 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -183,9 +182,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py index 18bf409e4004..2fcc58f03271 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 83c98d26518e..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py index 6cc4c143a6ac..f0398dadd045 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -184,9 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py index 2672c17bfe01..15a61ac82944 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py @@ -651,7 +651,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 83c98d26518e..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py index 3546065a46fd..0cbe928ee9ea 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py index 7f8cf02ede71..63c2052ad74d 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 898877880e13..5a90dfa88b31 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.26.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index f2c681ab954c..dcbeb822733b 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.26.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 151bc0122350..38eaede856da 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.26.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 25fc1ab14b6e..13ffc229e3a7 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - 
"version": "3.26.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py index a1ef85c12918..8b4fbb714598 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1217,8 +1217,9 @@ def test_export_assets_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_assets(request) @@ -1272,26 +1273,28 @@ async def test_export_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_assets - ] = mock_object + ] = mock_rpc request = {} await client.export_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1557,22 +1560,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2123,22 +2127,23 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_get_assets_history - ] = mock_object + ] = mock_rpc request = {} await client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_assets_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2423,22 +2428,23 @@ async def test_create_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_feed - ] = mock_object + ] = mock_rpc request = {} await client.create_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2801,22 +2807,23 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_feed - ] = mock_object + ] = mock_rpc request = {} await client.get_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3162,22 +3169,23 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_feeds - ] = mock_object + ] = mock_rpc request = {} await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_feeds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3531,22 +3539,23 @@ async def test_update_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_feed - ] = mock_object + ] = mock_rpc request = {} await client.update_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3892,22 +3901,23 @@ async def test_delete_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_feed - ] = mock_object + ] = mock_rpc request = {} await client.delete_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4265,22 +4275,23 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_resources - ] = mock_object + ] = mock_rpc request = {} await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4881,22 +4892,23 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_iam_policies - ] = mock_object + ] = mock_rpc request = {} await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5480,22 +5492,23 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.analyze_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5723,8 +5736,9 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.analyze_iam_policy_longrunning(request) @@ -5780,26 +5794,28 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_iam_policy_longrunning - ] = mock_object + ] = mock_rpc request = {} await client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.analyze_iam_policy_longrunning(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6067,22 +6083,23 @@ async def test_analyze_move_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_move - ] = mock_object + ] = mock_rpc request = {} await client.analyze_move(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_move(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6355,22 +6372,23 @@ async def test_query_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_assets - ] = mock_object + ] = mock_rpc request = {} await client.query_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6662,22 +6680,23 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.create_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7071,22 +7090,23 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.get_saved_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7457,22 +7477,23 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_saved_queries - ] = mock_object + ] = mock_rpc request = {} await client.list_saved_queries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_saved_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8050,22 +8071,23 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8445,22 +8467,23 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.delete_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8818,22 +8841,23 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_get_effective_iam_policies - ] = mock_object + ] = mock_rpc request = {} await client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_effective_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9122,22 +9146,23 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_org_policies - ] = mock_object + ] = mock_rpc request = {} await client.analyze_org_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9744,22 +9769,23 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_org_policy_governed_containers - ] = mock_object + ] = mock_rpc request = {} await client.analyze_org_policy_governed_containers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policy_governed_containers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10374,22 +10400,23 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_org_policy_governed_assets - ] = mock_object + ] = mock_rpc request = {} await client.analyze_org_policy_governed_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policy_governed_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py index ad14021b1c75..94ff99223b67 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py @@ -1277,22 +1277,23 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_resources - ] = mock_object + ] = mock_rpc request = {} await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1891,22 +1892,23 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_iam_policies - ] = mock_object + ] = mock_rpc request = {} await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py index 21121056156d..2fb5df0ff9e6 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py @@ -1270,22 +1270,23 @@ async def test_create_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_feed - ] = mock_object + ] = mock_rpc request = {} await client.create_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_feed - ] = mock_object + ] = mock_rpc request = {} await client.get_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2002,22 +2004,23 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_feeds - ] = mock_object + ] = mock_rpc request = {} await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_feeds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2368,22 +2371,23 @@ async def test_update_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_feed - ] = mock_object + ] = mock_rpc request = {} await client.update_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2727,22 +2731,23 @@ async def test_delete_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_feed - ] = mock_object + ] = mock_rpc request = {} await client.delete_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py index 575062a34e4a..541d85d6aadb 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py @@ -1262,22 +1262,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py index 673bd86fee98..f8874f2c0b7f 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssuredWorkloadsServiceClient).get_transport_class, - type(AssuredWorkloadsServiceClient), - ) + get_transport_class = AssuredWorkloadsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py index eb815b932858..e85690951b49 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py @@ -699,7 +699,7 @@ def __init__( Type[AssuredWorkloadsServiceTransport], Callable[..., AssuredWorkloadsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AssuredWorkloadsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssuredWorkloadsServiceTransport], transport) ) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py index b7feed42f6f7..dd46655343c7 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssuredWorkloadsServiceClient).get_transport_class, - type(AssuredWorkloadsServiceClient), - ) + get_transport_class = AssuredWorkloadsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py index ded263704f63..449efcd00681 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[AssuredWorkloadsServiceTransport], Callable[..., AssuredWorkloadsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AssuredWorkloadsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssuredWorkloadsServiceTransport], transport) ) diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json index 20c2b8afe354..5ecc555c1f98 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.12.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json index f9cd0d03eb89..7bc4e288cc6a 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.12.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py index a6b830d6486d..25ced336bc6d 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py @@ -1312,8 +1312,9 @@ def test_create_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
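The assured-workloads client hunks above drop the functools.partial re-binding of get_transport_class in favor of a plain attribute reference, and the sync client constructor now names the client class instead of going through type(self). The sketch below illustrates why the two spellings resolve to the same transport class; FakeClientMeta and FakeClient are simplified, hypothetical stand-ins for the generated metaclass and client, not the real generated types.

# Simplified stand-in showing that the functools.partial form and the plain
# attribute reference pick the same transport class.
import functools


class GrpcTransport: ...
class RestTransport: ...


class FakeClientMeta(type):
    _transport_registry = {"grpc": GrpcTransport, "rest": RestTransport}

    def get_transport_class(cls, label=None):
        # cls is the client class (an instance of this metaclass).
        return cls._transport_registry[label or "grpc"]


class FakeClient(metaclass=FakeClientMeta):
    pass


# Old generated form: explicitly re-bind the metaclass method.
old_style = functools.partial(
    type(FakeClient).get_transport_class, type(FakeClient)
)
# New generated form: the attribute lookup on the class is already bound.
new_style = FakeClient.get_transport_class

assert old_style("rest") is new_style("rest") is RestTransport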
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workload(request) @@ -1367,26 +1368,28 @@ async def test_create_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workload - ] = mock_object + ] = mock_rpc request = {} await client.create_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1774,22 +1777,23 @@ async def test_update_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workload - ] = mock_object + ] = mock_rpc request = {} await client.update_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2181,22 +2185,23 @@ async def test_restrict_allowed_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restrict_allowed_resources - ] = mock_object + ] = mock_rpc request = {} await client.restrict_allowed_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.restrict_allowed_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2462,22 +2467,23 @@ async def test_delete_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workload - ] = mock_object + ] = mock_rpc request = {} await client.delete_workload(request) # Establish that the underlying gRPC stub method was called. 
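The reworded comment in the create_workload tests spells out what the wrapper_fn assertions check: long-running-operation methods build client._transport.operations_client once, on the first RPC call, and later calls reuse that cached instance, so wrapper_fn is not invoked again after reset_mock(). A small sketch of that lazy-caching behaviour; FakeTransport and the wrapper_fn factory here are hypothetical stand-ins, not the generated transport.

# Minimal sketch of lazy operations_client caching, under the assumption of a
# hypothetical FakeTransport whose wrapper factory is a mock.
from unittest import mock


class FakeTransport:
    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        # Built once on first use, then cached for every later LRO call.
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(return_value=object())
transport = FakeTransport(wrapper_fn)

first = transport.operations_client   # first access builds the client
wrapper_fn.reset_mock()
second = transport.operations_client  # served from the cache
assert wrapper_fn.call_count == 0
assert first is second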
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2853,22 +2859,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3246,22 +3253,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3808,22 +3816,23 @@ async def test_list_violations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_violations - ] = mock_object + ] = mock_rpc request = {} await client.list_violations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_violations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4326,22 +4335,23 @@ async def test_get_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_violation - ] = mock_object + ] = mock_rpc request = {} await client.get_violation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4657,22 +4667,23 @@ async def test_acknowledge_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge_violation - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge_violation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py index 97b74c010d95..c15aa8bf4dbd 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py @@ -1312,8 +1312,9 @@ def test_create_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workload(request) @@ -1367,26 +1368,28 @@ async def test_create_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workload - ] = mock_object + ] = mock_rpc request = {} await client.create_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1771,22 +1774,23 @@ async def test_update_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workload - ] = mock_object + ] = mock_rpc request = {} await client.update_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2115,22 +2119,23 @@ async def test_restrict_allowed_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restrict_allowed_resources - ] = mock_object + ] = mock_rpc request = {} await client.restrict_allowed_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.restrict_allowed_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2396,22 +2401,23 @@ async def test_delete_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workload - ] = mock_object + ] = mock_rpc request = {} await client.delete_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2784,22 +2790,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3127,22 +3134,23 @@ async def test_analyze_workload_move_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_workload_move - ] = mock_object + ] = mock_rpc request = {} await client.analyze_workload_move(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_workload_move(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3447,22 +3455,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py index 598855f0fc1d..84852b458c9f 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,9 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoMlClient).get_transport_class, type(AutoMlClient) - ) + get_transport_class = AutoMlClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py index ccd37ea0d8fb..f75547d5642f 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py @@ -760,7 +760,7 @@ def __init__( transport_init: Union[ Type[AutoMlTransport], Callable[..., AutoMlTransport] ] = ( - type(self).get_transport_class(transport) + AutoMlClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoMlTransport], transport) ) diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py index a73394386c14..d82bdf58cd78 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py index deaab40e9db2..b8d837686197 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py index b59caf5e044b..665e597dd641 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,9 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoMlClient).get_transport_class, type(AutoMlClient) - ) + get_transport_class = AutoMlClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py index acec65183c21..cb77a70222f4 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py @@ -819,7 +819,7 @@ def __init__( transport_init: Union[ Type[AutoMlTransport], Callable[..., AutoMlTransport] ] = ( - type(self).get_transport_class(transport) + AutoMlClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoMlTransport], transport) ) diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py index e2ca9c63a3e9..b8ad20c31516 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py index 6b93a2ce64fb..b52f0c1c9c30 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json index 7c633120b887..df593972f2f6 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json index 3a9be5be7283..70c0487411a8 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py index b3cdc046cea3..34708e16bcaf 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py @@ -1184,8 +1184,9 @@ def test_create_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_dataset(request) @@ -1239,26 +1240,28 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1650,22 +1653,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2022,22 +2026,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2588,22 +2593,23 @@ async def test_update_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dataset - ] = mock_object + ] = mock_rpc request = {} await client.update_dataset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2934,8 +2940,9 @@ def test_delete_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_dataset(request) @@ -2989,26 +2996,28 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3298,8 +3307,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -3353,26 +3363,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3684,8 +3696,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -3739,26 +3752,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4156,22 +4171,23 @@ async def test_get_annotation_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4478,8 +4494,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -4533,26 +4550,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4942,22 +4961,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5314,22 +5334,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5816,8 +5837,9 @@ def test_delete_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_model(request) @@ -5871,26 +5893,28 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6244,22 +6268,23 @@ async def test_update_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_model - ] = mock_object + ] = mock_rpc request = {} await client.update_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6590,8 +6615,9 @@ def test_deploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_model(request) @@ -6645,26 +6671,28 @@ async def test_deploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_model - ] = mock_object + ] = mock_rpc request = {} await client.deploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6954,8 +6982,9 @@ def test_undeploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_model(request) @@ -7009,26 +7038,28 @@ async def test_undeploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_model - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7318,8 +7349,9 @@ def test_export_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_model(request) @@ -7373,26 +7405,28 @@ async def test_export_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_model - ] = mock_object + ] = mock_rpc request = {} await client.export_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7793,22 +7827,23 @@ async def test_get_model_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_model_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8190,22 +8225,23 @@ async def test_list_model_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_model_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_model_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_model_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py index f21e284bae99..7f6685ecdec3 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py @@ -1329,22 +1329,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1666,8 +1667,9 @@ def test_batch_predict_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_predict(request) @@ -1721,26 +1723,28 @@ async def test_batch_predict_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_predict - ] = mock_object + ] = mock_rpc request = {} await client.batch_predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py index af1f078f1b51..d79c33b503ca 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py @@ -1262,22 +1262,23 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1676,22 +1677,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2048,22 +2050,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2614,22 +2617,23 @@ async def test_update_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dataset - ] = mock_object + ] = mock_rpc request = {} await client.update_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2950,8 +2954,9 @@ def test_delete_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_dataset(request) @@ -3005,26 +3010,28 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3314,8 +3321,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -3369,26 +3377,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3700,8 +3710,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -3755,26 +3766,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4172,22 +4185,23 @@ async def test_get_annotation_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4565,22 +4579,23 @@ async def test_get_table_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_table_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4945,22 +4960,23 @@ async def test_list_table_specs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_table_specs - ] = mock_object + ] = mock_rpc request = {} await client.list_table_specs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_table_specs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5524,22 +5540,23 @@ async def test_update_table_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_table_spec - ] = mock_object + ] = mock_rpc request = {} await client.update_table_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_table_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5914,22 +5931,23 @@ async def test_get_column_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_column_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_column_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_column_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6296,22 +6314,23 @@ async def test_list_column_specs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_column_specs - ] = mock_object + ] = mock_rpc request = {} await client.list_column_specs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_column_specs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6886,22 +6905,23 @@ async def test_update_column_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_column_spec - ] = mock_object + ] = mock_rpc request = {} await client.update_column_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_column_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7208,8 +7228,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -7263,26 +7284,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7669,22 +7692,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8039,22 +8063,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8541,8 +8566,9 @@ def test_delete_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_model(request) @@ -8596,26 +8622,28 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8905,8 +8933,9 @@ def test_deploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_model(request) @@ -8960,26 +8989,28 @@ async def test_deploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_model - ] = mock_object + ] = mock_rpc request = {} await client.deploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9269,8 +9300,9 @@ def test_undeploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_model(request) @@ -9324,26 +9356,28 @@ async def test_undeploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_model - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9633,8 +9667,9 @@ def test_export_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_model(request) @@ -9688,26 +9723,28 @@ async def test_export_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_model - ] = mock_object + ] = mock_rpc request = {} await client.export_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10042,8 +10079,9 @@ def test_export_evaluated_examples_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_evaluated_examples(request) @@ -10099,26 +10137,28 @@ async def test_export_evaluated_examples_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_evaluated_examples - ] = mock_object + ] = mock_rpc request = {} await client.export_evaluated_examples(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_evaluated_examples(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10525,22 +10565,23 @@ async def test_get_model_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_model_evaluation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10922,22 +10963,23 @@ async def test_list_model_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_model_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_model_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_model_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py index 0e2018e1f32a..3d674900441e 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py @@ -1330,22 +1330,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1667,8 +1668,9 @@ def test_batch_predict_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_predict(request) @@ -1722,26 +1724,28 @@ async def test_batch_predict_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_predict - ] = mock_object + ] = mock_rpc request = {} await client.batch_predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 0151899eaf62..33ea84a57365 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BackupDRClient).get_transport_class, type(BackupDRClient) - ) + get_transport_class = BackupDRClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index ea052aabef9a..a853cfead99d 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -665,7 +665,7 @@ def __init__( transport_init: Union[ Type[BackupDRTransport], Callable[..., BackupDRTransport] ] = ( - type(self).get_transport_class(transport) + BackupDRClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BackupDRTransport], transport) ) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index 2671bc2d913d..ff879435143f 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index f7e48fb284b5..1b1c7ad74e91 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -1264,22 +1264,23 @@ async def test_list_management_servers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_management_servers - ] = mock_object + ] = mock_rpc request = {} await client.list_management_servers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_management_servers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1876,22 +1877,23 @@ async def test_get_management_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_management_server - ] = mock_object + ] = mock_rpc request = {} await client.get_management_server(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2223,8 +2225,9 @@ def test_create_management_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_management_server(request) @@ -2280,26 +2283,28 @@ async def test_create_management_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_management_server - ] = mock_object + ] = mock_rpc request = {} await client.create_management_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2632,8 +2637,9 @@ def test_delete_management_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_management_server(request) @@ -2689,26 +2695,28 @@ async def test_delete_management_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_management_server - ] = mock_object + ] = mock_rpc request = {} await client.delete_management_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py index afb6dc1c0388..f05bf87b0d43 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -278,9 +277,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BareMetalSolutionClient).get_transport_class, type(BareMetalSolutionClient) - ) + get_transport_class = BareMetalSolutionClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py index 8383e474221e..e25097f02f90 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py @@ -1034,7 +1034,7 @@ def __init__( Type[BareMetalSolutionTransport], Callable[..., BareMetalSolutionTransport], ] = ( - type(self).get_transport_class(transport) + BareMetalSolutionClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BareMetalSolutionTransport], transport) ) diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json index ddbf76e56a0e..b8dd39f4b60c 100644 --- a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bare-metal-solution", - "version": "1.7.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py index 8d42ff9372d4..0b5a2762a3e1 100644 --- a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py +++ b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py @@ -1359,22 +1359,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1952,22 +1953,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2274,8 +2276,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2329,26 +2332,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2739,22 +2744,23 @@ async def test_rename_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_instance - ] = mock_object + ] = mock_rpc request = {} await client.rename_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3075,8 +3081,9 @@ def test_reset_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_instance(request) @@ -3130,26 +3137,28 @@ async def test_reset_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_instance - ] = mock_object + ] = mock_rpc request = {} await client.reset_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3439,8 +3448,9 @@ def test_start_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_instance(request) @@ -3494,26 +3504,28 @@ async def test_start_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_instance - ] = mock_object + ] = mock_rpc request = {} await client.start_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3803,8 +3815,9 @@ def test_stop_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_instance(request) @@ -3858,26 +3871,28 @@ async def test_stop_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_instance - ] = mock_object + ] = mock_rpc request = {} await client.stop_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4178,8 +4193,9 @@ def test_enable_interactive_serial_console_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_interactive_serial_console(request) @@ -4235,26 +4251,28 @@ async def test_enable_interactive_serial_console_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_interactive_serial_console - ] = mock_object + ] = mock_rpc request = {} await client.enable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_interactive_serial_console(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4566,8 +4584,9 @@ def test_disable_interactive_serial_console_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_interactive_serial_console(request) @@ -4623,26 +4642,28 @@ async def test_disable_interactive_serial_console_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_interactive_serial_console - ] = mock_object + ] = mock_rpc request = {} await client.disable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_interactive_serial_console(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4945,8 +4966,9 @@ def test_detach_lun_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.detach_lun(request) @@ -4998,26 +5020,28 @@ async def test_detach_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detach_lun - ] = mock_object + ] = mock_rpc request = {} await client.detach_lun(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.detach_lun(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5375,22 +5399,23 @@ async def test_list_ssh_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_ssh_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_ssh_keys(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_ssh_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5938,22 +5963,23 @@ async def test_create_ssh_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ssh_key - ] = mock_object + ] = mock_rpc request = {} await client.create_ssh_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_ssh_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6313,22 +6339,23 @@ async def test_delete_ssh_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_ssh_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_ssh_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_ssh_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6675,22 +6702,23 @@ async def test_list_volumes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_volumes - ] = mock_object + ] = mock_rpc request = {} await client.list_volumes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_volumes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7299,22 +7327,23 @@ async def test_get_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_volume - ] = mock_object + ] = mock_rpc request = {} await client.get_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7645,8 +7674,9 @@ def test_update_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_volume(request) @@ -7700,26 +7730,28 @@ async def test_update_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_volume - ] = mock_object + ] = mock_rpc request = {} await client.update_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8143,22 +8175,23 @@ async def test_rename_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_volume - ] = mock_object + ] = mock_rpc request = {} await client.rename_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8503,8 +8536,9 @@ def test_evict_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.evict_volume(request) @@ -8558,26 +8592,28 @@ async def test_evict_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.evict_volume - ] = mock_object + ] = mock_rpc request = {} await client.evict_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.evict_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8867,8 +8903,9 @@ def test_resize_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resize_volume(request) @@ -8922,26 +8959,28 @@ async def test_resize_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resize_volume - ] = mock_object + ] = mock_rpc request = {} await client.resize_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resize_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9304,22 +9343,23 @@ async def test_list_networks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_networks - ] = mock_object + ] = mock_rpc request = {} await client.list_networks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_networks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9871,22 +9911,23 @@ async def test_list_network_usage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_network_usage - ] = mock_object + ] = mock_rpc request = {} await client.list_network_usage(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_network_usage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10275,22 +10316,23 @@ async def test_get_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_network - ] = mock_object + ] = mock_rpc request = {} await client.get_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10597,8 +10639,9 @@ def test_update_network_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_network(request) @@ -10652,26 +10695,28 @@ async def test_update_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_network - ] = mock_object + ] = mock_rpc request = {} await client.update_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11052,22 +11097,23 @@ async def test_create_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.create_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11400,8 +11446,9 @@ def test_restore_volume_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_volume_snapshot(request) @@ -11457,26 +11504,28 @@ async def test_restore_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.restore_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11839,22 +11888,23 @@ async def test_delete_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.delete_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12229,22 +12279,23 @@ async def test_get_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.get_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12630,22 +12681,23 @@ async def test_list_volume_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_volume_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_volume_snapshots(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_volume_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13237,22 +13289,23 @@ async def test_get_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lun - ] = mock_object + ] = mock_rpc request = {} await client.get_lun(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lun(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13620,22 +13673,23 @@ async def test_list_luns_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_luns - ] = mock_object + ] = mock_rpc request = {} await client.list_luns(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_luns(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14124,8 +14178,9 @@ def test_evict_lun_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.evict_lun(request) @@ -14177,26 +14232,28 @@ async def test_evict_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.evict_lun - ] = mock_object + ] = mock_rpc request = {} await client.evict_lun(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.evict_lun(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14560,22 +14617,23 @@ async def test_get_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.get_nfs_share(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14939,22 +14997,23 @@ async def test_list_nfs_shares_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_nfs_shares - ] = mock_object + ] = mock_rpc request = {} await client.list_nfs_shares(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_nfs_shares(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15441,8 +15500,9 @@ def test_update_nfs_share_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_nfs_share(request) @@ -15496,26 +15556,28 @@ async def test_update_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.update_nfs_share(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15817,8 +15879,9 @@ def test_create_nfs_share_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_nfs_share(request) @@ -15872,26 +15935,28 @@ async def test_create_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.create_nfs_share(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16269,22 +16334,23 @@ async def test_rename_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.rename_nfs_share(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16597,8 +16663,9 @@ def test_delete_nfs_share_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_nfs_share(request) @@ -16652,26 +16719,28 @@ async def test_delete_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.delete_nfs_share(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17032,22 +17101,23 @@ async def test_list_provisioning_quotas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_provisioning_quotas - ] = mock_object + ] = mock_rpc request = {} await client.list_provisioning_quotas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_provisioning_quotas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17621,22 +17691,23 @@ async def test_submit_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.submit_provisioning_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.submit_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18043,22 +18114,23 @@ async def test_get_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.get_provisioning_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18478,22 +18550,23 @@ async def test_create_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.create_provisioning_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18921,22 +18994,23 @@ async def test_update_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.update_provisioning_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19359,22 +19433,23 @@ async def test_rename_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_network - ] = mock_object + ] = mock_rpc request = {} await client.rename_network(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19753,22 +19828,23 @@ async def test_list_os_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_images - ] = mock_object + ] = mock_rpc request = {} await client.list_os_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py index 10c32368d63b..962dd370306f 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,10 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppConnectionsServiceClient).get_transport_class, - type(AppConnectionsServiceClient), - ) + get_transport_class = AppConnectionsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py index 2036ca7cc4df..0e82da29ad9b 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py @@ -739,7 +739,7 @@ def __init__( Type[AppConnectionsServiceTransport], Callable[..., AppConnectionsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AppConnectionsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppConnectionsServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json index f0f179574810..a630b6644a7e 100644 --- a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json +++ b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnections", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py index 691dba428c52..e0d01098f488 100644 --- a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py +++ b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py @@ -1389,22 +1389,23 @@ async def test_list_app_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_app_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_app_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_app_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1999,22 +2000,23 @@ async def test_get_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,8 +2345,9 @@ def test_create_app_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_app_connection(request) @@ -2400,26 +2403,28 @@ async def test_create_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2751,8 +2756,9 @@ def test_update_app_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_app_connection(request) @@ -2808,26 +2814,28 @@ async def test_update_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3151,8 +3159,9 @@ def test_delete_app_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_app_connection(request) @@ -3208,26 +3217,28 @@ async def test_delete_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3604,22 +3615,23 @@ async def test_resolve_app_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resolve_app_connections - ] = mock_object + ] = mock_rpc request = {} await client.resolve_app_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resolve_app_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py index 926e618d6187..70a5047fb8ee 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,10 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppConnectorsServiceClient).get_transport_class, - type(AppConnectorsServiceClient), - ) + get_transport_class = AppConnectorsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py index 59edee9a9254..485df705d13c 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py @@ -695,7 +695,7 @@ def __init__( Type[AppConnectorsServiceTransport], Callable[..., AppConnectorsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AppConnectorsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppConnectorsServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json index 400b504d4ea6..b24a837b4434 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json +++ b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnectors", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py index db455cd7bb62..120223dd7e89 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py +++ b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py @@ -1392,22 +1392,23 @@ async def test_list_app_connectors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_app_connectors - ] = mock_object + ] = mock_rpc request = {} await client.list_app_connectors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_app_connectors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1992,22 +1993,23 @@ async def test_get_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.get_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2331,8 +2333,9 @@ def test_create_app_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_app_connector(request) @@ -2388,26 +2391,28 @@ async def test_create_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.create_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2738,8 +2743,9 @@ def test_update_app_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_app_connector(request) @@ -2795,26 +2801,28 @@ async def test_update_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.update_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,8 +3145,9 @@ def test_delete_app_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_app_connector(request) @@ -3194,26 +3203,28 @@ async def test_delete_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.delete_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3516,8 +3527,9 @@ def test_report_status_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.report_status(request) @@ -3571,26 +3583,28 @@ async def test_report_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_status - ] = mock_object + ] = mock_rpc request = {} await client.report_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.report_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py index 71ed7b071c35..133141bf3bfc 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -212,10 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppGatewaysServiceClient).get_transport_class, - type(AppGatewaysServiceClient), - ) + get_transport_class = AppGatewaysServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py index abc83d4d0df6..9bb6f081d233 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py @@ -690,7 +690,7 @@ def __init__( Type[AppGatewaysServiceTransport], Callable[..., AppGatewaysServiceTransport], ] = ( - type(self).get_transport_class(transport) + AppGatewaysServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppGatewaysServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json index 36bbe9dd1c18..4146eed832ac 100644 --- a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json +++ b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appgateways", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py index b884f194d421..f80e9716f499 100644 --- a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py +++ b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py @@ -1358,22 +1358,23 @@ async def test_list_app_gateways_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_app_gateways - ] = mock_object + ] = mock_rpc request = {} await client.list_app_gateways(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_app_gateways(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1957,22 +1958,23 @@ async def test_get_app_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_app_gateway - ] = mock_object + ] = mock_rpc request = {} await client.get_app_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_app_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2294,8 +2296,9 @@ def test_create_app_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_app_gateway(request) @@ -2351,26 +2354,28 @@ async def test_create_app_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_app_gateway - ] = mock_object + ] = mock_rpc request = {} await client.create_app_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_app_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2703,8 +2708,9 @@ def test_delete_app_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_app_gateway(request) @@ -2760,26 +2766,28 @@ async def test_delete_app_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_app_gateway - ] = mock_object + ] = mock_rpc request = {} await client.delete_app_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_app_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py index e1445f5a2545..185fc139c1da 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -229,10 +228,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClientConnectorServicesServiceClient).get_transport_class, - type(ClientConnectorServicesServiceClient), - ) + get_transport_class = ClientConnectorServicesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py index 5b0ad123fb11..550c2c7aa6f0 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py @@ -707,7 +707,7 @@ def __init__( Type[ClientConnectorServicesServiceTransport], Callable[..., ClientConnectorServicesServiceTransport], ] = ( - type(self).get_transport_class(transport) + ClientConnectorServicesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., ClientConnectorServicesServiceTransport], transport diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json index 67eefa2752d3..9a230827e86d 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientconnectorservices", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py 
b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py index f867d5f86c02..982b7b9d9252 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py @@ -1429,22 +1429,23 @@ async def test_list_client_connector_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_client_connector_services - ] = mock_object + ] = mock_rpc request = {} await client.list_client_connector_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_client_connector_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2054,22 +2055,23 @@ async def test_get_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.get_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2404,8 +2406,9 @@ def test_create_client_connector_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_client_connector_service(request) @@ -2464,26 +2467,28 @@ async def test_create_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.create_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2836,8 +2841,9 @@ def test_update_client_connector_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_client_connector_service(request) @@ -2896,26 +2902,28 @@ async def test_update_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.update_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3260,8 +3268,9 @@ def test_delete_client_connector_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_client_connector_service(request) @@ -3320,26 +3329,28 @@ async def test_delete_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py index 792f80c59ee5..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py index 792f80c59ee5..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py index 6aced810bb66..d66c2154fbe9 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
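# A small illustrative sketch (assumed names, standard library only) of the
# caching behaviour the reworded comments describe: the first operation call
# builds a helper via a factory and caches it, and later calls reuse the cached
# instance, which is what the `wrapper_fn.call_count == 0` assertions verify.
class _FakeTransport:
    def __init__(self, factory):
        self._factory = factory          # stands in for wrapper_fn
        self._operations_client = None   # built lazily on first use

    @property
    def operations_client(self):
        if self._operations_client is None:
            self._operations_client = self._factory()
        return self._operations_client


built = []
transport = _FakeTransport(factory=lambda: built.append("once") or object())
first = transport.operations_client   # factory runs here
second = transport.operations_client  # cached instance reused
assert first is second
assert built == ["once"]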
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,10 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClientGatewaysServiceClient).get_transport_class, - type(ClientGatewaysServiceClient), - ) + get_transport_class = ClientGatewaysServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py index 623bc52e144d..96a93ecd29e2 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py @@ -689,7 +689,7 @@ def __init__( Type[ClientGatewaysServiceTransport], Callable[..., ClientGatewaysServiceTransport], ] = ( - type(self).get_transport_class(transport) + ClientGatewaysServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClientGatewaysServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json index e6588b11df67..6d3ff814a340 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json +++ b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientgateways", - "version": "0.4.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py index 26f2b6ff7a5b..b2315222b3f8 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py +++ b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py @@ -1388,22 +1388,23 @@ async def test_list_client_gateways_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_client_gateways - ] = mock_object + ] = mock_rpc request = {} await client.list_client_gateways(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_client_gateways(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1992,22 +1993,23 @@ async def test_get_client_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_client_gateway - ] = mock_object + ] = mock_rpc request = {} await client.get_client_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_client_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2332,8 +2334,9 @@ def test_create_client_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_client_gateway(request) @@ -2389,26 +2392,28 @@ async def test_create_client_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_gateway - ] = mock_object + ] = mock_rpc request = {} await client.create_client_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_client_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2742,8 +2747,9 @@ def test_delete_client_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_client_gateway(request) @@ -2799,26 +2805,28 @@ async def test_delete_client_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_client_gateway - ] = mock_object + ] = mock_rpc request = {} await client.delete_client_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_client_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py index 292deecf4f23..b75e593f1449 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,10 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsHubServiceClient).get_transport_class, - type(AnalyticsHubServiceClient), - ) + get_transport_class = AnalyticsHubServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py index 36b8315ade23..c9dabbcb36ef 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py @@ -764,7 +764,7 @@ def __init__( Type[AnalyticsHubServiceTransport], Callable[..., AnalyticsHubServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsHubServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsHubServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json index c9affa2ed5af..0e7f7893e9b6 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-analyticshub", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py index 5d773f946ca5..4dd8673a177b 100644 --- a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py @@ -1334,22 +1334,23 @@ async def test_list_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_data_exchanges(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1925,22 +1926,23 @@ async def test_list_org_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_org_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_org_data_exchanges(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_org_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2533,22 +2535,23 @@ async def test_get_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.get_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2956,22 +2959,23 @@ async def test_create_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.create_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3383,22 +3387,23 @@ async def test_update_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.update_data_exchange(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3786,22 +3791,23 @@ async def test_delete_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4153,22 +4159,23 @@ async def test_list_listings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_listings - ] = mock_object + ] = mock_rpc request = {} await client.list_listings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_listings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4738,22 +4745,23 @@ async def test_get_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_listing - ] = mock_object + ] = mock_rpc request = {} await client.get_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5149,22 +5157,23 @@ async def test_create_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_listing - ] = mock_object + ] = mock_rpc request = {} await client.create_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5588,22 +5597,23 @@ async def test_update_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_listing - ] = mock_object + ] = mock_rpc request = {} await client.update_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5997,22 +6007,23 @@ async def test_delete_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_listing - ] = mock_object + ] = mock_rpc request = {} await client.delete_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6357,22 +6368,23 @@ async def test_subscribe_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.subscribe_listing - ] = mock_object + ] = mock_rpc request = {} await client.subscribe_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.subscribe_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6689,8 +6701,9 @@ def test_subscribe_data_exchange_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.subscribe_data_exchange(request) @@ -6746,26 +6759,28 @@ async def test_subscribe_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.subscribe_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.subscribe_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.subscribe_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7076,8 +7091,9 @@ def test_refresh_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.refresh_subscription(request) @@ -7133,26 +7149,28 @@ async def test_refresh_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.refresh_subscription - ] = mock_object + ] = mock_rpc request = {} await client.refresh_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.refresh_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7524,22 +7542,23 @@ async def test_get_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subscription - ] = mock_object + ] = mock_rpc request = {} await client.get_subscription(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7912,22 +7931,23 @@ async def test_list_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8503,22 +8523,23 @@ async def test_list_shared_resource_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_shared_resource_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_shared_resource_subscriptions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_shared_resource_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9089,22 +9110,23 @@ async def test_revoke_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revoke_subscription - ] = mock_object + ] = mock_rpc request = {} await client.revoke_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.revoke_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9414,8 +9436,9 @@ def test_delete_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_subscription(request) @@ -9471,26 +9494,28 @@ async def test_delete_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_subscription - ] = mock_object + ] = mock_rpc request = {} await client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9849,22 +9874,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10151,22 +10177,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10463,22 +10490,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py index 27a1c7da1ce8..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py index 27a1c7da1ce8..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py index 39cdc5786c28..9976660fe3a9 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetastoreServiceClient).get_transport_class, type(MetastoreServiceClient) - ) + get_transport_class = MetastoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py index 73043e89fbc7..37fc63db909f 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py @@ -727,7 +727,7 @@ def __init__( Type[MetastoreServiceTransport], Callable[..., MetastoreServiceTransport], ] = ( - type(self).get_transport_class(transport) + MetastoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetastoreServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py index 27a1c7da1ce8..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py index 57d093f919c3..740fef82f9d2 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetastoreServiceClient).get_transport_class, type(MetastoreServiceClient) - ) + get_transport_class = MetastoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py index 30013e6dc3a4..0893a12db218 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py @@ -753,7 +753,7 @@ def __init__( Type[MetastoreServiceTransport], Callable[..., MetastoreServiceTransport], ] = ( - type(self).get_transport_class(transport) + MetastoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetastoreServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json index 904fbd6135fd..bc293aab30cd 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json index 4692b3d37bb0..f43e6ba05775 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py index ec4c2498df35..4eb88b1409b3 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py @@ -1318,22 +1318,23 @@ async def test_create_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_catalog - ] = mock_object + ] = mock_rpc request = {} await client.create_catalog(request) # Establish 
that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1698,22 +1699,23 @@ async def test_delete_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_catalog - ] = mock_object + ] = mock_rpc request = {} await client.delete_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2058,22 +2060,23 @@ async def test_get_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_catalog - ] = mock_object + ] = mock_rpc request = {} await client.get_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2420,22 +2423,23 @@ async def test_list_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.list_catalogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,22 +2987,23 @@ async def test_create_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_database - ] = mock_object + ] = mock_rpc request = {} await client.create_database(request) # Establish that the underlying gRPC stub method was called. 
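The hunks above repeat one pattern for every RPC in these generated tests: the cached wrapped method is replaced by an AsyncMock now named mock_rpc, and that mock is given an explicit plain-Mock return value so that awaiting it yields a non-async object (our reading of the change, not something stated in the diff). A minimal, self-contained sketch of the pattern follows; the helper and dictionary names are illustrative only, not the generated client internals.

import asyncio
from unittest import mock


async def exercise_cached_rpc(wrapped_methods, key):
    # Illustrative stand-in for the client method that looks up and awaits
    # the cached wrapped RPC; the real generated clients do more than this.
    rpc = wrapped_methods[key]
    await rpc({})
    await rpc({})
    return rpc


async def main():
    # The renamed test double: an AsyncMock with an explicit, non-async
    # return value, mirroring `mock_rpc.return_value = mock.Mock()` above.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()

    wrapped_methods = {"create_catalog": mock_rpc}
    rpc = await exercise_cached_rpc(wrapped_methods, "create_catalog")

    # Same shape as the generated assertions: the cached mock is reused,
    # so its call_count climbs with each request.
    assert rpc.call_count == 2
    assert not asyncio.iscoroutine(rpc.return_value)


asyncio.run(main())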
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3392,22 +3397,23 @@ async def test_delete_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_database - ] = mock_object + ] = mock_rpc request = {} await client.delete_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3753,22 +3759,23 @@ async def test_update_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_database - ] = mock_object + ] = mock_rpc request = {} await client.update_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4159,23 @@ async def test_get_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_database - ] = mock_object + ] = mock_rpc request = {} await client.get_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4516,22 +4524,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5082,22 +5091,23 @@ async def test_create_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_table - ] = mock_object + ] = mock_rpc request = {} await client.create_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5496,22 +5506,23 @@ async def test_delete_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_table - ] = mock_object + ] = mock_rpc request = {} await client.delete_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5862,22 +5873,23 @@ async def test_update_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_table - ] = mock_object + ] = mock_rpc request = {} await client.update_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6268,22 +6280,23 @@ async def test_rename_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_table - ] = mock_object + ] = mock_rpc request = {} await client.rename_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6646,22 +6659,23 @@ async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table - ] = mock_object + ] = mock_rpc request = {} await client.get_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7012,22 +7026,23 @@ async def test_list_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tables - ] = mock_object + ] = mock_rpc request = {} await client.list_tables(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tables(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py index 883e1cd3626a..c6daf99d6847 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py @@ -1318,22 +1318,23 @@ async def test_create_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_catalog - ] = mock_object + ] = mock_rpc request = {} await client.create_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1698,22 +1699,23 @@ async def test_delete_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_catalog - ] = mock_object + ] = mock_rpc request = {} await client.delete_catalog(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2058,22 +2060,23 @@ async def test_get_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_catalog - ] = mock_object + ] = mock_rpc request = {} await client.get_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2420,22 +2423,23 @@ async def test_list_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.list_catalogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,22 +2987,23 @@ async def test_create_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_database - ] = mock_object + ] = mock_rpc request = {} await client.create_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3392,22 +3397,23 @@ async def test_delete_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_database - ] = mock_object + ] = mock_rpc request = {} await client.delete_database(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3753,22 +3759,23 @@ async def test_update_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_database - ] = mock_object + ] = mock_rpc request = {} await client.update_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4159,23 @@ async def test_get_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_database - ] = mock_object + ] = mock_rpc request = {} await client.get_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4516,22 +4524,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5082,22 +5091,23 @@ async def test_create_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_table - ] = mock_object + ] = mock_rpc request = {} await client.create_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5496,22 +5506,23 @@ async def test_delete_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_table - ] = mock_object + ] = mock_rpc request = {} await client.delete_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5862,22 +5873,23 @@ async def test_update_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_table - ] = mock_object + ] = mock_rpc request = {} await client.update_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6268,22 +6280,23 @@ async def test_rename_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_table - ] = mock_object + ] = mock_rpc request = {} await client.rename_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6646,22 +6659,23 @@ async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table - ] = mock_object + ] = mock_rpc request = {} await client.get_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7012,22 +7026,23 @@ async def test_list_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tables - ] = mock_object + ] = mock_rpc request = {} await client.list_tables(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tables(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7577,22 +7592,23 @@ async def test_create_lock_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lock - ] = mock_object + ] = mock_rpc request = {} await client.create_lock(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_lock(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7944,22 +7960,23 @@ async def test_delete_lock_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lock - ] = mock_object + ] = mock_rpc request = {} await client.delete_lock(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_lock(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8304,22 +8321,23 @@ async def test_check_lock_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_lock - ] = mock_object + ] = mock_rpc request = {} await client.check_lock(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_lock(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8668,22 +8686,23 @@ async def test_list_locks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_locks - ] = mock_object + ] = mock_rpc request = {} await client.list_locks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_locks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py index 6d980c8ff3b3..9aef76185375 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient) - ) + get_transport_class = ConnectionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py index 8eaa107e8478..4dd767c16bad 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py @@ -716,7 +716,7 @@ def __init__( Type[ConnectionServiceTransport], Callable[..., ConnectionServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConnectionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConnectionServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json index 273e29615ff2..0fde6f0d6368 100644 --- a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json +++ b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-connection", - "version": "1.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py index 4f7b0d2864d8..a643d6edcbd2 100644 --- a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py @@ -1355,22 +1355,23 @@ async def test_create_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_connection(request) # Establish that the underlying gRPC stub method was called. 
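The other change repeated across these async clients drops the functools.partial wrapper around get_transport_class in favor of plain attribute access on the sync client class. The toy sketch below shows why the two spellings resolve to the same transport lookup, assuming (as in the generated clients) that get_transport_class is defined on the client's metaclass and reads a transport registry; ClientMeta, SomeServiceClient, and the transport placeholders are illustrative names, not the real API.

import functools
from collections import OrderedDict


class _GrpcTransport:
    """Placeholder transport type for the sketch."""


class _RestTransport:
    """Placeholder transport type for the sketch."""


class ClientMeta(type):
    # Stand-in for the generated *ClientMeta metaclasses: a registry of
    # transport classes plus a lookup method defined on the metaclass.
    _transport_registry = OrderedDict(grpc=_GrpcTransport, rest=_RestTransport)

    def get_transport_class(cls, label=None):
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class SomeServiceClient(metaclass=ClientMeta):
    pass


# The form being removed: explicitly re-bind the metaclass with a partial.
old_style = functools.partial(
    type(SomeServiceClient).get_transport_class, type(SomeServiceClient)
)
# The simplified form kept by this change: attribute access already yields a
# bound method, because the method lives on the metaclass.
new_style = SomeServiceClient.get_transport_class

# Both spellings reach the same registry, since the lookup resolves through
# the metaclass either way.
assert old_style("grpc") is new_style("grpc") is _GrpcTransport
assert old_style() is new_style() is _GrpcTransport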
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1775,22 +1776,23 @@ async def test_get_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2153,22 +2155,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2736,22 +2739,23 @@ async def test_update_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3144,22 +3148,23 @@ async def test_delete_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3512,22 +3517,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3894,22 +3900,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4286,22 +4293,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py index bf678492aaad..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py index bf678492aaad..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py index b1ea10447be6..877a8f55c9ab 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,10 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsHubServiceClient).get_transport_class, - type(AnalyticsHubServiceClient), - ) + get_transport_class = AnalyticsHubServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py index 361b7577651d..c3820bca8c92 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py @@ -718,7 +718,7 @@ def __init__( Type[AnalyticsHubServiceTransport], Callable[..., AnalyticsHubServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsHubServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsHubServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json index 4a849aea8d88..6e4d55c65d52 100644 --- a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json +++ b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-data-exchange", - "version": 
"0.5.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py index 8319363da6db..61a9895afd21 100644 --- a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py @@ -1323,22 +1323,23 @@ async def test_list_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_data_exchanges(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1914,22 +1915,23 @@ async def test_list_org_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_org_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_org_data_exchanges(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_org_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2519,22 +2521,23 @@ async def test_get_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.get_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2937,22 +2940,23 @@ async def test_create_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.create_data_exchange(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3359,22 +3363,23 @@ async def test_update_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.update_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3760,22 +3765,23 @@ async def test_delete_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4127,22 +4133,23 @@ async def test_list_listings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_listings - ] = mock_object + ] = mock_rpc request = {} await client.list_listings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_listings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4709,22 +4716,23 @@ async def test_get_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_listing - ] = mock_object + ] = mock_rpc request = {} await client.get_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5115,22 +5123,23 @@ async def test_create_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_listing - ] = mock_object + ] = mock_rpc request = {} await client.create_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5549,22 +5558,23 @@ async def test_update_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_listing - ] = mock_object + ] = mock_rpc request = {} await client.update_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5956,22 +5966,23 @@ async def test_delete_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_listing - ] = mock_object + ] = mock_rpc request = {} await client.delete_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6316,22 +6327,23 @@ async def test_subscribe_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.subscribe_listing - ] = mock_object + ] = mock_rpc request = {} await client.subscribe_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.subscribe_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6690,22 +6702,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6992,22 +7005,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7304,22 +7318,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py index dc9c00a75490..02ffeb4d3d2c 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataPolicyServiceClient).get_transport_class, type(DataPolicyServiceClient) - ) + get_transport_class = DataPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py index 584b1dc78848..50a82cf1652f 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[DataPolicyServiceTransport], Callable[..., DataPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataPolicyServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py index 633beaae99b0..a1bfade9d9f6 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataPolicyServiceClient).get_transport_class, type(DataPolicyServiceClient) - ) + get_transport_class = DataPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py index 00cfdfd25187..c7e5e6aa27a8 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[DataPolicyServiceTransport], Callable[..., DataPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataPolicyServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json index 975b08c55b8a..6be87a75b46d 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json index 4e353afc1b2f..6753f39659f4 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": 
[ { diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py index 6a7e0e4337fd..ec982a3e1c3c 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py @@ -1349,22 +1349,23 @@ async def test_create_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1758,22 +1759,23 @@ async def test_update_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2173,22 +2175,23 @@ async def test_rename_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.rename_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2569,22 +2572,23 @@ async def test_delete_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2944,22 +2948,23 @@ async def test_get_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3331,22 +3336,23 @@ async def test_list_data_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_data_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3910,22 +3916,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4212,22 +4219,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4524,22 +4532,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py index 8ed13bae9c6c..5b3ce146b7bf 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py @@ -1311,22 +1311,23 @@ async def test_create_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1720,22 +1721,23 @@ async def test_update_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_data_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2116,22 +2118,23 @@ async def test_delete_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2491,22 +2494,23 @@ async def test_get_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2876,22 +2880,23 @@ async def test_list_data_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_data_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3455,22 +3460,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3757,22 +3763,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4069,22 +4076,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 71a07a06cd93..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 71a07a06cd93..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index f66b024bd01c..87ab9e5d50f5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataTransferServiceClient).get_transport_class, - type(DataTransferServiceClient), - ) + get_transport_class = DataTransferServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index c8dc5155fa8e..7bf64c346481 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -713,7 +713,7 @@ def __init__( Type[DataTransferServiceTransport], Callable[..., DataTransferServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataTransferServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataTransferServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index dd8310150319..adc8c281da8f 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index b15240ba8d36..527a6e81160e 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -1407,22 +1407,23 @@ async def test_get_data_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = 
mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_source - ] = mock_object + ] = mock_rpc request = {} await client.get_data_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1819,22 +1820,23 @@ async def test_list_data_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2442,22 +2444,23 @@ async def test_create_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.create_transfer_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2890,22 +2893,23 @@ async def test_update_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.update_transfer_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3299,22 +3303,23 @@ async def test_delete_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_transfer_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3705,22 +3710,23 @@ async def test_get_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.get_transfer_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4112,22 +4118,23 @@ async def test_list_transfer_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transfer_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_transfer_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transfer_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4697,22 +4704,23 @@ async def test_schedule_transfer_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.schedule_transfer_runs - ] = mock_object + ] = mock_rpc request = {} await client.schedule_transfer_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.schedule_transfer_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5097,22 +5105,23 @@ async def test_start_manual_transfer_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_manual_transfer_runs - ] = mock_object + ] = mock_rpc request = {} await client.start_manual_transfer_runs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_manual_transfer_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5401,22 +5410,23 @@ async def test_get_transfer_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transfer_run - ] = mock_object + ] = mock_rpc request = {} await client.get_transfer_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transfer_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5780,22 +5790,23 @@ async def test_delete_transfer_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_transfer_run - ] = mock_object + ] = mock_rpc request = {} await client.delete_transfer_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_transfer_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6159,22 +6170,23 @@ async def test_list_transfer_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transfer_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_transfer_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transfer_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6749,22 +6761,23 @@ async def test_list_transfer_logs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transfer_logs - ] = mock_object + ] = mock_rpc request = {} await client.list_transfer_logs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transfer_logs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7335,22 +7348,23 @@ async def test_check_valid_creds_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_valid_creds - ] = mock_object + ] = mock_rpc request = {} await client.check_valid_creds(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_valid_creds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7714,22 +7728,23 @@ async def test_enroll_data_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enroll_data_sources - ] = mock_object + ] = mock_rpc request = {} await client.enroll_data_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enroll_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8001,22 +8016,23 @@ async def test_unenroll_data_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unenroll_data_sources - ] = mock_object + ] = mock_rpc request = {} await client.unenroll_data_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.unenroll_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 226fd7c7dc114bd93d73f5759a0039a60cba90b2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:55:06 -0400 Subject: [PATCH 014/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#13001) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBoaXNoaW5nLXByb3RlY3Rpb24vLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBvbGljeS10cm91Ymxlc2hvb3Rlci8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBvbGljeXNpbXVsYXRvci8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBvbGljeXRyb3VibGVzaG9vdGVyLWlhbS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXByaXZhdGUtY2EvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXByaXZhdGUtY2F0YWxvZy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXByaXZpbGVnZWRhY2Nlc3NtYW5hZ2VyLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXB1YmxpYy1jYS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJhcGlkbWlncmF0aW9uYXNzZXNzbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlY2FwdGNoYS1lbnRlcnByaXNlLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlY29tbWVuZGF0aW9ucy1haS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlY29tbWVuZGVyLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlZGlzLWNsdXN0ZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlZGlzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlc291cmNlLW1hbmFnZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlc291cmNlLXNldHRpbmdzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJldGFpbC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJ1bi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNjaGVkdWxlci8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlY3JldC1tYW5hZ2VyLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../cloud/phishingprotection/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- ...ogle.cloud.phishingprotection.v1beta1.json | 2 +- 
...st_phishing_protection_service_v1_beta1.py | 9 +- .../policytroubleshooter/gapic_version.py | 2 +- .../policytroubleshooter_v1/gapic_version.py | 2 +- .../services/iam_checker/async_client.py | 5 +- .../services/iam_checker/client.py | 2 +- ..._google.cloud.policytroubleshooter.v1.json | 2 +- .../test_iam_checker.py | 9 +- .../cloud/policysimulator/gapic_version.py | 2 +- .../cloud/policysimulator_v1/gapic_version.py | 2 +- .../services/simulator/async_client.py | 5 +- .../services/simulator/client.py | 2 +- ...adata_google.cloud.policysimulator.v1.json | 2 +- .../policysimulator_v1/test_simulator.py | 37 +- .../policytroubleshooter_iam/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../policy_troubleshooter/async_client.py | 6 +- .../services/policy_troubleshooter/client.py | 2 +- ...gle.cloud.policytroubleshooter.iam.v3.json | 2 +- .../test_policy_troubleshooter.py | 9 +- .../cloud/security/privateca/gapic_version.py | 2 +- .../security/privateca_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../certificate_authority_service/client.py | 2 +- .../privateca_v1beta1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../certificate_authority_service/client.py | 2 +- ...ta_google.cloud.security.privateca.v1.json | 2 +- ...ogle.cloud.security.privateca.v1beta1.json | 2 +- .../test_certificate_authority_service.py | 401 ++++++++++-------- .../test_certificate_authority_service.py | 260 +++++++----- .../cloud/privatecatalog/gapic_version.py | 2 +- .../privatecatalog_v1beta1/gapic_version.py | 2 +- .../services/private_catalog/async_client.py | 5 +- .../services/private_catalog/client.py | 2 +- ...a_google.cloud.privatecatalog.v1beta1.json | 2 +- .../test_private_catalog.py | 27 +- .../privilegedaccessmanager/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../privileged_access_manager/async_client.py | 6 +- .../privileged_access_manager/client.py | 2 +- ...ogle.cloud.privilegedaccessmanager.v1.json | 2 +- .../test_privileged_access_manager.py | 166 ++++---- .../cloud/security/publicca/gapic_version.py | 2 +- .../security/publicca_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../publicca_v1beta1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- ...ata_google.cloud.security.publicca.v1.json | 2 +- ...oogle.cloud.security.publicca.v1beta1.json | 2 +- ...st_public_certificate_authority_service.py | 9 +- ...st_public_certificate_authority_service.py | 9 +- .../rapidmigrationassessment/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../rapid_migration_assessment/client.py | 2 +- ...gle.cloud.rapidmigrationassessment.v1.json | 2 +- .../test_rapid_migration_assessment.py | 160 ++++--- .../recaptchaenterprise/gapic_version.py | 2 +- .../recaptchaenterprise_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../recaptcha_enterprise_service/client.py | 2 +- ...a_google.cloud.recaptchaenterprise.v1.json | 2 +- .../test_recaptcha_enterprise_service.py | 171 ++++---- .../recommendationengine/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../services/catalog_service/async_client.py | 5 +- .../services/catalog_service/client.py | 2 +- .../async_client.py | 6 +- .../prediction_api_key_registry/client.py | 2 +- .../prediction_service/async_client.py | 5 +- .../services/prediction_service/client.py | 2 +- .../user_event_service/async_client.py | 5 +- .../services/user_event_service/client.py | 2 +- ...le.cloud.recommendationengine.v1beta1.json | 2 +- .../test_catalog_service.py | 64 +-- 
.../test_prediction_api_key_registry.py | 27 +- .../test_prediction_service.py | 9 +- .../test_user_event_service.py | 65 +-- .../google/cloud/recommender/gapic_version.py | 2 +- .../cloud/recommender_v1/gapic_version.py | 2 +- .../services/recommender/async_client.py | 5 +- .../services/recommender/client.py | 2 +- .../recommender_v1beta1/gapic_version.py | 2 +- .../services/recommender/async_client.py | 5 +- .../services/recommender/client.py | 2 +- ..._metadata_google.cloud.recommender.v1.json | 2 +- ...data_google.cloud.recommender.v1beta1.json | 2 +- .../gapic/recommender_v1/test_recommender.py | 117 ++--- .../recommender_v1beta1/test_recommender.py | 126 +++--- .../cloud/redis_cluster/gapic_version.py | 2 +- .../cloud/redis_cluster_v1/gapic_version.py | 2 +- .../cloud_redis_cluster/async_client.py | 5 +- .../services/cloud_redis_cluster/client.py | 2 +- .../redis_cluster_v1beta1/gapic_version.py | 2 +- .../cloud_redis_cluster/async_client.py | 5 +- .../services/cloud_redis_cluster/client.py | 2 +- ...etadata_google.cloud.redis.cluster.v1.json | 2 +- ...ta_google.cloud.redis.cluster.v1beta1.json | 2 +- .../test_cloud_redis_cluster.py | 84 ++-- .../test_cloud_redis_cluster.py | 84 ++-- .../google/cloud/redis/gapic_version.py | 2 +- .../google/cloud/redis_v1/gapic_version.py | 2 +- .../services/cloud_redis/async_client.py | 5 +- .../redis_v1/services/cloud_redis/client.py | 2 +- .../cloud/redis_v1beta1/gapic_version.py | 2 +- .../services/cloud_redis/async_client.py | 5 +- .../services/cloud_redis/client.py | 2 +- ...nippet_metadata_google.cloud.redis.v1.json | 2 +- ...t_metadata_google.cloud.redis.v1beta1.json | 2 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 179 ++++---- .../gapic/redis_v1beta1/test_cloud_redis.py | 179 ++++---- .../cloud/resourcemanager/gapic_version.py | 2 +- .../cloud/resourcemanager_v3/gapic_version.py | 2 +- .../services/folders/async_client.py | 5 +- .../services/folders/client.py | 2 +- .../services/organizations/async_client.py | 5 +- .../services/organizations/client.py | 2 +- .../services/projects/async_client.py | 5 +- .../services/projects/client.py | 2 +- .../services/tag_bindings/async_client.py | 5 +- .../services/tag_bindings/client.py | 2 +- .../services/tag_holds/async_client.py | 5 +- .../services/tag_holds/client.py | 2 +- .../services/tag_keys/async_client.py | 5 +- .../services/tag_keys/client.py | 2 +- .../services/tag_values/async_client.py | 5 +- .../services/tag_values/client.py | 2 +- ...adata_google.cloud.resourcemanager.v3.json | 2 +- .../gapic/resourcemanager_v3/test_folders.py | 149 ++++--- .../resourcemanager_v3/test_organizations.py | 45 +- .../gapic/resourcemanager_v3/test_projects.py | 149 ++++--- .../resourcemanager_v3/test_tag_bindings.py | 56 +-- .../resourcemanager_v3/test_tag_holds.py | 47 +- .../gapic/resourcemanager_v3/test_tag_keys.py | 111 ++--- .../resourcemanager_v3/test_tag_values.py | 111 ++--- .../cloud/resourcesettings/gapic_version.py | 2 +- .../resourcesettings_v1/gapic_version.py | 2 +- .../resource_settings_service/async_client.py | 6 +- .../resource_settings_service/client.py | 2 +- ...data_google.cloud.resourcesettings.v1.json | 2 +- .../test_resource_settings_service.py | 27 +- .../google/cloud/retail/gapic_version.py | 2 +- .../google/cloud/retail_v2/gapic_version.py | 2 +- .../analytics_service/async_client.py | 5 +- .../services/analytics_service/client.py | 2 +- .../services/catalog_service/async_client.py | 5 +- .../services/catalog_service/client.py | 2 +- .../completion_service/async_client.py | 5 +- 
.../services/completion_service/client.py | 2 +- .../services/control_service/async_client.py | 5 +- .../services/control_service/client.py | 2 +- .../services/model_service/async_client.py | 5 +- .../services/model_service/client.py | 2 +- .../prediction_service/async_client.py | 5 +- .../services/prediction_service/client.py | 2 +- .../services/product_service/async_client.py | 5 +- .../services/product_service/client.py | 2 +- .../services/search_service/async_client.py | 5 +- .../services/search_service/client.py | 2 +- .../serving_config_service/async_client.py | 6 +- .../services/serving_config_service/client.py | 2 +- .../user_event_service/async_client.py | 5 +- .../services/user_event_service/client.py | 2 +- .../cloud/retail_v2alpha/gapic_version.py | 2 +- .../analytics_service/async_client.py | 5 +- .../services/analytics_service/client.py | 2 +- .../services/branch_service/async_client.py | 5 +- .../services/branch_service/client.py | 2 +- .../services/catalog_service/async_client.py | 5 +- .../services/catalog_service/client.py | 2 +- .../completion_service/async_client.py | 5 +- .../services/completion_service/client.py | 2 +- .../services/control_service/async_client.py | 5 +- .../services/control_service/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../services/model_service/async_client.py | 5 +- .../services/model_service/client.py | 2 +- .../prediction_service/async_client.py | 5 +- .../services/prediction_service/client.py | 2 +- .../services/product_service/async_client.py | 5 +- .../services/product_service/client.py | 2 +- .../services/project_service/async_client.py | 5 +- .../services/project_service/client.py | 2 +- .../services/search_service/async_client.py | 5 +- .../services/search_service/client.py | 2 +- .../serving_config_service/async_client.py | 6 +- .../services/serving_config_service/client.py | 2 +- .../user_event_service/async_client.py | 5 +- .../services/user_event_service/client.py | 2 +- .../cloud/retail_v2beta/gapic_version.py | 2 +- .../analytics_service/async_client.py | 5 +- .../services/analytics_service/client.py | 2 +- .../services/catalog_service/async_client.py | 5 +- .../services/catalog_service/client.py | 2 +- .../completion_service/async_client.py | 5 +- .../services/completion_service/client.py | 2 +- .../services/control_service/async_client.py | 5 +- .../services/control_service/client.py | 2 +- .../services/model_service/async_client.py | 5 +- .../services/model_service/client.py | 2 +- .../prediction_service/async_client.py | 5 +- .../services/prediction_service/client.py | 2 +- .../services/product_service/async_client.py | 5 +- .../services/product_service/client.py | 2 +- .../services/search_service/async_client.py | 5 +- .../services/search_service/client.py | 2 +- .../serving_config_service/async_client.py | 6 +- .../services/serving_config_service/client.py | 2 +- .../user_event_service/async_client.py | 5 +- .../services/user_event_service/client.py | 2 +- ...ippet_metadata_google.cloud.retail.v2.json | 2 +- ..._metadata_google.cloud.retail.v2alpha.json | 2 +- ...t_metadata_google.cloud.retail.v2beta.json | 2 +- .../gapic/retail_v2/test_analytics_service.py | 19 +- .../gapic/retail_v2/test_catalog_service.py | 99 +++-- .../retail_v2/test_completion_service.py | 28 +- .../gapic/retail_v2/test_control_service.py | 45 +- .../gapic/retail_v2/test_model_service.py | 92 ++-- .../retail_v2/test_prediction_service.py | 9 +- .../gapic/retail_v2/test_product_service.py | 178 ++++---- 
.../gapic/retail_v2/test_search_service.py | 9 +- .../retail_v2/test_serving_config_service.py | 63 +-- .../retail_v2/test_user_event_service.py | 75 ++-- .../retail_v2alpha/test_analytics_service.py | 19 +- .../retail_v2alpha/test_branch_service.py | 18 +- .../retail_v2alpha/test_catalog_service.py | 108 ++--- .../retail_v2alpha/test_completion_service.py | 28 +- .../retail_v2alpha/test_control_service.py | 45 +- ...st_merchant_center_account_link_service.py | 37 +- .../retail_v2alpha/test_model_service.py | 92 ++-- .../retail_v2alpha/test_prediction_service.py | 9 +- .../retail_v2alpha/test_product_service.py | 178 ++++---- .../retail_v2alpha/test_project_service.py | 82 ++-- .../retail_v2alpha/test_search_service.py | 9 +- .../test_serving_config_service.py | 63 +-- .../retail_v2alpha/test_user_event_service.py | 75 ++-- .../retail_v2beta/test_analytics_service.py | 19 +- .../retail_v2beta/test_catalog_service.py | 108 ++--- .../retail_v2beta/test_completion_service.py | 28 +- .../retail_v2beta/test_control_service.py | 45 +- .../gapic/retail_v2beta/test_model_service.py | 92 ++-- .../retail_v2beta/test_prediction_service.py | 9 +- .../retail_v2beta/test_product_service.py | 178 ++++---- .../retail_v2beta/test_search_service.py | 9 +- .../test_serving_config_service.py | 63 +-- .../retail_v2beta/test_user_event_service.py | 75 ++-- .../google/cloud/run/gapic_version.py | 2 +- .../google/cloud/run_v2/gapic_version.py | 2 +- .../services/executions/async_client.py | 5 +- .../run_v2/services/executions/client.py | 2 +- .../run_v2/services/jobs/async_client.py | 5 +- .../cloud/run_v2/services/jobs/client.py | 2 +- .../run_v2/services/revisions/async_client.py | 5 +- .../cloud/run_v2/services/revisions/client.py | 2 +- .../run_v2/services/services/async_client.py | 5 +- .../cloud/run_v2/services/services/client.py | 2 +- .../run_v2/services/tasks/async_client.py | 5 +- .../cloud/run_v2/services/tasks/client.py | 2 +- .../snippet_metadata_google.cloud.run.v2.json | 2 +- .../unit/gapic/run_v2/test_executions.py | 56 +-- .../tests/unit/gapic/run_v2/test_jobs.py | 121 +++--- .../tests/unit/gapic/run_v2/test_revisions.py | 37 +- .../tests/unit/gapic/run_v2/test_services.py | 102 +++-- .../tests/unit/gapic/run_v2/test_tasks.py | 18 +- .../google/cloud/scheduler/gapic_version.py | 2 +- .../cloud/scheduler_v1/gapic_version.py | 2 +- .../services/cloud_scheduler/async_client.py | 5 +- .../services/cloud_scheduler/client.py | 2 +- .../cloud/scheduler_v1beta1/gapic_version.py | 2 +- .../services/cloud_scheduler/async_client.py | 5 +- .../services/cloud_scheduler/client.py | 2 +- ...et_metadata_google.cloud.scheduler.v1.json | 2 +- ...tadata_google.cloud.scheduler.v1beta1.json | 2 +- .../scheduler_v1/test_cloud_scheduler.py | 72 ++-- .../scheduler_v1beta1/test_cloud_scheduler.py | 72 ++-- .../cloud/secretmanager/gapic_version.py | 2 +- .../cloud/secretmanager_v1/gapic_version.py | 2 +- .../secret_manager_service/async_client.py | 6 +- .../services/secret_manager_service/client.py | 2 +- .../secretmanager_v1beta1/gapic_version.py | 2 +- .../secret_manager_service/async_client.py | 6 +- .../services/secret_manager_service/client.py | 2 +- .../secretmanager_v1beta2/gapic_version.py | 2 +- .../secret_manager_service/async_client.py | 6 +- .../services/secret_manager_service/client.py | 2 +- ...etadata_google.cloud.secretmanager.v1.json | 2 +- ...ta_google.cloud.secretmanager.v1beta2.json | 2 +- ...metadata_google.cloud.secrets.v1beta1.json | 2 +- .../test_secret_manager_service.py | 135 +++--- 
.../test_secret_manager_service.py | 135 +++--- .../test_secret_manager_service.py | 135 +++--- 299 files changed, 3499 insertions(+), 2975 deletions(-) diff --git a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection/gapic_version.py b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection/gapic_version.py +++ b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/gapic_version.py b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/gapic_version.py +++ b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/async_client.py b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/async_client.py index 712643bdda2a..dbc87ff76c1e 100644 --- a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/async_client.py +++ b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,10 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PhishingProtectionServiceV1Beta1Client).get_transport_class, - type(PhishingProtectionServiceV1Beta1Client), - ) + get_transport_class = PhishingProtectionServiceV1Beta1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/client.py b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/client.py index fdb5ad75a077..3a46310b0faf 100644 --- a/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/client.py +++ b/packages/google-cloud-phishing-protection/google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/client.py @@ -657,7 +657,7 @@ def __init__( Type[PhishingProtectionServiceV1Beta1Transport], Callable[..., PhishingProtectionServiceV1Beta1Transport], ] = ( - type(self).get_transport_class(transport) + PhishingProtectionServiceV1Beta1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., PhishingProtectionServiceV1Beta1Transport], transport diff --git a/packages/google-cloud-phishing-protection/samples/generated_samples/snippet_metadata_google.cloud.phishingprotection.v1beta1.json b/packages/google-cloud-phishing-protection/samples/generated_samples/snippet_metadata_google.cloud.phishingprotection.v1beta1.json index 652676fa0b73..250bb4a74445 100644 --- a/packages/google-cloud-phishing-protection/samples/generated_samples/snippet_metadata_google.cloud.phishingprotection.v1beta1.json +++ b/packages/google-cloud-phishing-protection/samples/generated_samples/snippet_metadata_google.cloud.phishingprotection.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-phishing-protection", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-phishing-protection/tests/unit/gapic/phishingprotection_v1beta1/test_phishing_protection_service_v1_beta1.py b/packages/google-cloud-phishing-protection/tests/unit/gapic/phishingprotection_v1beta1/test_phishing_protection_service_v1_beta1.py index 461c5d37ca0d..af774419d9f2 100644 --- a/packages/google-cloud-phishing-protection/tests/unit/gapic/phishingprotection_v1beta1/test_phishing_protection_service_v1_beta1.py +++ b/packages/google-cloud-phishing-protection/tests/unit/gapic/phishingprotection_v1beta1/test_phishing_protection_service_v1_beta1.py @@ -1387,22 +1387,23 @@ async def test_report_phishing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_phishing - ] = mock_object + ] = mock_rpc request = {} await client.report_phishing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.report_phishing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter/gapic_version.py b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter/gapic_version.py +++ b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/gapic_version.py b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/gapic_version.py +++ b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/async_client.py b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/async_client.py index 87d04adfc6b4..16a70165f089 100644 --- a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/async_client.py +++ b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -184,9 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IamCheckerClient).get_transport_class, type(IamCheckerClient) - ) + get_transport_class = IamCheckerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/client.py b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/client.py index 6a955ee407b3..75579e5e50bc 100644 --- a/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/client.py +++ b/packages/google-cloud-policy-troubleshooter/google/cloud/policytroubleshooter_v1/services/iam_checker/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[IamCheckerTransport], Callable[..., IamCheckerTransport] ] = ( - type(self).get_transport_class(transport) + IamCheckerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IamCheckerTransport], transport) ) diff --git a/packages/google-cloud-policy-troubleshooter/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.v1.json b/packages/google-cloud-policy-troubleshooter/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.v1.json index 9c59eb354bc6..f5712f1a7dff 100644 --- a/packages/google-cloud-policy-troubleshooter/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.v1.json +++ b/packages/google-cloud-policy-troubleshooter/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-policy-troubleshooter", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-policy-troubleshooter/tests/unit/gapic/policytroubleshooter_v1/test_iam_checker.py b/packages/google-cloud-policy-troubleshooter/tests/unit/gapic/policytroubleshooter_v1/test_iam_checker.py index 17170e536522..aea8ff278a08 100644 --- a/packages/google-cloud-policy-troubleshooter/tests/unit/gapic/policytroubleshooter_v1/test_iam_checker.py +++ b/packages/google-cloud-policy-troubleshooter/tests/unit/gapic/policytroubleshooter_v1/test_iam_checker.py @@ -1249,22 +1249,23 @@ async def test_troubleshoot_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.troubleshoot_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.troubleshoot_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.troubleshoot_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-policysimulator/google/cloud/policysimulator/gapic_version.py b/packages/google-cloud-policysimulator/google/cloud/policysimulator/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-cloud-policysimulator/google/cloud/policysimulator/gapic_version.py +++ b/packages/google-cloud-policysimulator/google/cloud/policysimulator/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/gapic_version.py b/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/gapic_version.py +++ b/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/async_client.py b/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/async_client.py index 9c6bc757d69b..1b3e1684ea1b 100644 --- a/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/async_client.py +++ b/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SimulatorClient).get_transport_class, type(SimulatorClient) - ) + get_transport_class = SimulatorClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/client.py b/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/client.py index 5a7dfd3ae22a..1394da8ef751 100644 --- a/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/client.py +++ b/packages/google-cloud-policysimulator/google/cloud/policysimulator_v1/services/simulator/client.py @@ -696,7 +696,7 @@ def __init__( transport_init: Union[ Type[SimulatorTransport], Callable[..., SimulatorTransport] ] = ( - type(self).get_transport_class(transport) + SimulatorClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SimulatorTransport], transport) ) diff --git a/packages/google-cloud-policysimulator/samples/generated_samples/snippet_metadata_google.cloud.policysimulator.v1.json b/packages/google-cloud-policysimulator/samples/generated_samples/snippet_metadata_google.cloud.policysimulator.v1.json index 653fe56dca79..5df72c21a276 100644 --- a/packages/google-cloud-policysimulator/samples/generated_samples/snippet_metadata_google.cloud.policysimulator.v1.json +++ b/packages/google-cloud-policysimulator/samples/generated_samples/snippet_metadata_google.cloud.policysimulator.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-policysimulator", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py b/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py index a74b8961ae73..ebb56653ef15 100644 --- a/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py +++ b/packages/google-cloud-policysimulator/tests/unit/gapic/policysimulator_v1/test_simulator.py @@ -1250,22 +1250,23 @@ async def test_get_replay_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_replay - ] = mock_object + ] = mock_rpc request = {} await client.get_replay(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_replay(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1556,8 +1557,9 @@ def test_create_replay_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_replay(request) @@ -1611,26 +1613,28 @@ async def test_create_replay_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_replay - ] = mock_object + ] = mock_rpc request = {} await client.create_replay(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_replay(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2000,22 +2004,23 @@ async def test_list_replay_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_replay_results - ] = mock_object + ] = mock_rpc request = {} await client.list_replay_results(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_replay_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam/gapic_version.py b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam/gapic_version.py index cf5493b86bbc..558c8aab67c5 100644 --- a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam/gapic_version.py +++ b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/gapic_version.py b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/gapic_version.py index cf5493b86bbc..558c8aab67c5 100644 --- a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/gapic_version.py +++ b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
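The reworded test comments above describe the LRO plumbing: the first operation-returning call builds client._transport.operations_client through wrapper_fn, and later calls reuse that cached instance. A rough sketch with hypothetical names (FakeTransport, wrapper_fn) of the caching behavior those assertions exercise:

from unittest import mock


class FakeTransport:
    """Toy transport: builds its operations client once, then caches it."""

    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(return_value=object())
transport = FakeTransport(wrapper_fn)

_ = transport.operations_client    # first LRO call builds the client
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
_ = transport.operations_client    # later calls reuse the cached instance
assert wrapper_fn.call_count == 0

This mirrors the test flow in the hunks above: wrapper_fn.reset_mock() after the first call, then asserting wrapper_fn.call_count == 0 on the second.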
# -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/async_client.py b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/async_client.py index 4987c7ed98b6..5911aff64219 100644 --- a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/async_client.py +++ b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,10 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTroubleshooterClient).get_transport_class, - type(PolicyTroubleshooterClient), - ) + get_transport_class = PolicyTroubleshooterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/client.py b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/client.py index e207f18b00a3..b195d1bc48c6 100644 --- a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/client.py +++ b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/services/policy_troubleshooter/client.py @@ -646,7 +646,7 @@ def __init__( Type[PolicyTroubleshooterTransport], Callable[..., PolicyTroubleshooterTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTroubleshooterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PolicyTroubleshooterTransport], transport) ) diff --git a/packages/google-cloud-policytroubleshooter-iam/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.iam.v3.json b/packages/google-cloud-policytroubleshooter-iam/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.iam.v3.json index b0022e34c162..1be07e8450f7 100644 --- a/packages/google-cloud-policytroubleshooter-iam/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.iam.v3.json +++ b/packages/google-cloud-policytroubleshooter-iam/samples/generated_samples/snippet_metadata_google.cloud.policytroubleshooter.iam.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-policytroubleshooter-iam", - "version": "0.1.7" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-policytroubleshooter-iam/tests/unit/gapic/policytroubleshooter_iam_v3/test_policy_troubleshooter.py b/packages/google-cloud-policytroubleshooter-iam/tests/unit/gapic/policytroubleshooter_iam_v3/test_policy_troubleshooter.py index 682710dbcafc..2424e60929e8 100644 --- a/packages/google-cloud-policytroubleshooter-iam/tests/unit/gapic/policytroubleshooter_iam_v3/test_policy_troubleshooter.py +++ b/packages/google-cloud-policytroubleshooter-iam/tests/unit/gapic/policytroubleshooter_iam_v3/test_policy_troubleshooter.py @@ -1361,22 +1361,23 @@ async def test_troubleshoot_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped 
function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.troubleshoot_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.troubleshoot_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.troubleshoot_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-private-ca/google/cloud/security/privateca/gapic_version.py b/packages/google-cloud-private-ca/google/cloud/security/privateca/gapic_version.py index 0688549d000d..558c8aab67c5 100644 --- a/packages/google-cloud-private-ca/google/cloud/security/privateca/gapic_version.py +++ b/packages/google-cloud-private-ca/google/cloud/security/privateca/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/gapic_version.py b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/gapic_version.py index 0688549d000d..558c8aab67c5 100644 --- a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/gapic_version.py +++ b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/async_client.py b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/async_client.py index d787953aed4b..1f75bf37a0ab 100644 --- a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/async_client.py +++ b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -235,10 +234,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CertificateAuthorityServiceClient).get_transport_class, - type(CertificateAuthorityServiceClient), - ) + get_transport_class = CertificateAuthorityServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/client.py b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/client.py index ff5d2cfefb0c..c3d6c8d07687 100644 --- a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/client.py +++ b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1/services/certificate_authority_service/client.py @@ -785,7 +785,7 @@ def __init__( Type[CertificateAuthorityServiceTransport], Callable[..., CertificateAuthorityServiceTransport], ] = ( - type(self).get_transport_class(transport) + CertificateAuthorityServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CertificateAuthorityServiceTransport], transport diff --git a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/gapic_version.py b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/gapic_version.py index 0688549d000d..558c8aab67c5 100644 --- a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/gapic_version.py +++ b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/async_client.py b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/async_client.py index 7f6a6c3f6a94..da4fbf6716a3 100644 --- a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/async_client.py +++ b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -226,10 +225,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CertificateAuthorityServiceClient).get_transport_class, - type(CertificateAuthorityServiceClient), - ) + get_transport_class = CertificateAuthorityServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/client.py b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/client.py index 875f2faee497..3f617c654203 100644 --- a/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/client.py +++ b/packages/google-cloud-private-ca/google/cloud/security/privateca_v1beta1/services/certificate_authority_service/client.py @@ -754,7 +754,7 @@ def __init__( Type[CertificateAuthorityServiceTransport], Callable[..., CertificateAuthorityServiceTransport], ] = ( - type(self).get_transport_class(transport) + CertificateAuthorityServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CertificateAuthorityServiceTransport], transport diff --git a/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1.json b/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1.json index 7e4120478b01..2ae53b32ea8d 100644 --- a/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1.json +++ b/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-private-ca", - "version": "1.12.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1beta1.json b/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1beta1.json index a860314c4f28..d9af1f5c993a 100644 --- a/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1beta1.json +++ b/packages/google-cloud-private-ca/samples/generated_samples/snippet_metadata_google.cloud.security.privateca.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-private-ca", - "version": "1.12.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py index 90141101216c..45657b7d387e 100644 --- a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py +++ b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1/test_certificate_authority_service.py @@ -1422,22 +1422,23 @@ async def test_create_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate - ] = mock_object + ] = 
mock_rpc request = {} await client.create_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1842,22 +1843,23 @@ async def test_get_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2235,22 +2237,23 @@ async def test_list_certificates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificates - ] = mock_object + ] = mock_rpc request = {} await client.list_certificates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2843,22 +2846,23 @@ async def test_revoke_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revoke_certificate - ] = mock_object + ] = mock_rpc request = {} await client.revoke_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.revoke_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3255,22 +3259,23 @@ async def test_update_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate(request) # Establish that the underlying gRPC stub method was called. 
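Throughout these test updates, mock_object becomes mock_rpc and gains an explicit mock_rpc.return_value = mock.Mock(), so awaiting the stubbed RPC yields an ordinary synchronous Mock rather than a nested AsyncMock. A compact sketch of the pattern, using a bare dict in place of the real transport's _wrapped_methods mapping (an assumption for illustration):

import asyncio
from unittest import mock


async def demo():
    # Same shape as the updated tests: an AsyncMock RPC whose awaited result
    # is a plain synchronous Mock.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()

    # Stand-in for client._client._transport._wrapped_methods[...] = mock_rpc
    wrapped_methods = {"create_certificate": mock_rpc}

    response = await wrapped_methods["create_certificate"]({})
    assert mock_rpc.call_count == 1
    assert isinstance(response, mock.Mock)
    assert not isinstance(response, mock.AsyncMock)

    # A second call reuses the same cached wrapper; only the count grows.
    await wrapped_methods["create_certificate"]({})
    assert mock_rpc.call_count == 2


asyncio.run(demo())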
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3608,8 +3613,9 @@ def test_activate_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.activate_certificate_authority(request) @@ -3665,26 +3671,28 @@ async def test_activate_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.activate_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.activate_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.activate_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4000,8 +4008,9 @@ def test_create_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_authority(request) @@ -4057,26 +4066,28 @@ async def test_create_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4410,8 +4421,9 @@ def test_disable_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_certificate_authority(request) @@ -4467,26 +4479,28 @@ async def test_disable_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.disable_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4800,8 +4814,9 @@ def test_enable_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_certificate_authority(request) @@ -4857,26 +4872,28 @@ async def test_enable_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.enable_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5246,22 +5263,23 @@ async def test_fetch_certificate_authority_csr_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_certificate_authority_csr - ] = mock_object + ] = mock_rpc request = {} await client.fetch_certificate_authority_csr(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_certificate_authority_csr(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5649,22 +5667,23 @@ async def test_get_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6055,22 +6074,23 @@ async def test_list_certificate_authorities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_authorities - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_authorities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_authorities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6593,8 +6613,9 @@ def test_undelete_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_certificate_authority(request) @@ -6650,26 +6671,28 @@ async def test_undelete_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.undelete_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6983,8 +7006,9 @@ def test_delete_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_authority(request) @@ -7040,26 +7064,28 @@ async def test_delete_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7371,8 +7397,9 @@ def test_update_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_authority(request) @@ -7428,26 +7455,28 @@ async def test_update_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7762,8 +7791,9 @@ def test_create_ca_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_ca_pool(request) @@ -7817,26 +7847,28 @@ async def test_create_ca_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ca_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_ca_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_ca_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8146,8 +8178,9 @@ def test_update_ca_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_ca_pool(request) @@ -8201,26 +8234,28 @@ async def test_update_ca_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_ca_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_ca_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_ca_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8579,22 +8614,23 @@ async def test_get_ca_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ca_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_ca_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ca_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8950,22 +8986,23 @@ async def test_list_ca_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_ca_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_ca_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_ca_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9456,8 +9493,9 @@ def test_delete_ca_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_ca_pool(request) @@ -9511,26 +9549,28 @@ async def test_delete_ca_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_ca_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_ca_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_ca_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9873,22 +9913,23 @@ async def test_fetch_ca_certs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_ca_certs - ] = mock_object + ] = mock_rpc request = {} await client.fetch_ca_certs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_ca_certs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10262,22 +10303,23 @@ async def test_get_certificate_revocation_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_revocation_list - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_revocation_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_revocation_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10669,22 +10711,23 @@ async def test_list_certificate_revocation_lists_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_revocation_lists - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_revocation_lists(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_revocation_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11207,8 +11250,9 @@ def test_update_certificate_revocation_list_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_revocation_list(request) @@ -11264,26 +11308,28 @@ async def test_update_certificate_revocation_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_revocation_list - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_revocation_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_revocation_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11617,8 +11663,9 @@ def test_create_certificate_template_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_template(request) @@ -11674,26 +11721,28 @@ async def test_create_certificate_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_template - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12027,8 +12076,9 @@ def test_delete_certificate_template_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_template(request) @@ -12084,26 +12134,28 @@ async def test_delete_certificate_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12476,22 +12528,23 @@ async def test_get_certificate_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_template - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12874,22 +12927,23 @@ async def test_list_certificate_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_templates(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13410,8 +13464,9 @@ def test_update_certificate_template_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_template(request) @@ -13467,26 +13522,28 @@ async def test_update_certificate_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_template - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py index 8915936b0f8c..4c96ccd2ee1a 100644 --- a/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py +++ b/packages/google-cloud-private-ca/tests/unit/gapic/privateca_v1beta1/test_certificate_authority_service.py @@ -1406,22 +1406,23 @@ async def test_create_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1811,22 +1812,23 @@ async def test_get_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2198,22 +2200,23 @@ async def test_list_certificates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificates - ] = mock_object + ] = mock_rpc request = {} await client.list_certificates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2797,22 +2800,23 @@ async def test_revoke_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revoke_certificate - ] = mock_object + ] = mock_rpc request = {} await client.revoke_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.revoke_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3194,22 +3198,23 @@ async def test_update_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3541,8 +3546,9 @@ def test_activate_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.activate_certificate_authority(request) @@ -3598,26 +3604,28 @@ async def test_activate_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.activate_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.activate_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.activate_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3933,8 +3941,9 @@ def test_create_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_authority(request) @@ -3990,26 +3999,28 @@ async def test_create_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4343,8 +4354,9 @@ def test_disable_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_certificate_authority(request) @@ -4400,26 +4412,28 @@ async def test_disable_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.disable_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4733,8 +4747,9 @@ def test_enable_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_certificate_authority(request) @@ -4790,26 +4805,28 @@ async def test_enable_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.enable_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5179,22 +5196,23 @@ async def test_fetch_certificate_authority_csr_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_certificate_authority_csr - ] = mock_object + ] = mock_rpc request = {} await client.fetch_certificate_authority_csr(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_certificate_authority_csr(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5582,22 +5600,23 @@ async def test_get_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5988,22 +6007,23 @@ async def test_list_certificate_authorities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_authorities - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_authorities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_authorities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6526,8 +6546,9 @@ def test_restore_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_certificate_authority(request) @@ -6583,26 +6604,28 @@ async def test_restore_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.restore_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6916,8 +6939,9 @@ def test_schedule_delete_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.schedule_delete_certificate_authority(request) @@ -6973,26 +6997,28 @@ async def test_schedule_delete_certificate_authority_async_use_cached_wrapped_rp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.schedule_delete_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.schedule_delete_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.schedule_delete_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7304,8 +7330,9 @@ def test_update_certificate_authority_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_authority(request) @@ -7361,26 +7388,28 @@ async def test_update_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7772,22 +7801,23 @@ async def test_get_certificate_revocation_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_revocation_list - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_revocation_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_revocation_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8177,22 +8207,23 @@ async def test_list_certificate_revocation_lists_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_revocation_lists - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_revocation_lists(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_revocation_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8715,8 +8746,9 @@ def test_update_certificate_revocation_list_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_revocation_list(request) @@ -8772,26 +8804,28 @@ async def test_update_certificate_revocation_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_revocation_list - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_revocation_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_revocation_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9181,22 +9215,23 @@ async def test_get_reusable_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_reusable_config - ] = mock_object + ] = mock_rpc request = {} await client.get_reusable_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_reusable_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9579,22 +9614,23 @@ async def test_list_reusable_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reusable_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_reusable_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reusable_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-private-catalog/google/cloud/privatecatalog/gapic_version.py b/packages/google-cloud-private-catalog/google/cloud/privatecatalog/gapic_version.py index bc3866048d8a..558c8aab67c5 100644 --- a/packages/google-cloud-private-catalog/google/cloud/privatecatalog/gapic_version.py +++ b/packages/google-cloud-private-catalog/google/cloud/privatecatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.9.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/gapic_version.py b/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/gapic_version.py index bc3866048d8a..558c8aab67c5 100644 --- a/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/async_client.py b/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/async_client.py index 8514ca0cdd5e..2521b1a30e1e 100644 --- a/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/async_client.py +++ b/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,9 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PrivateCatalogClient).get_transport_class, type(PrivateCatalogClient) - ) + get_transport_class = PrivateCatalogClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/client.py b/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/client.py index 6f96cd4c9a17..ad837e80086d 100644 --- a/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/client.py +++ b/packages/google-cloud-private-catalog/google/cloud/privatecatalog_v1beta1/services/private_catalog/client.py @@ -712,7 +712,7 @@ def __init__( transport_init: Union[ Type[PrivateCatalogTransport], Callable[..., PrivateCatalogTransport] ] = ( - type(self).get_transport_class(transport) + PrivateCatalogClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PrivateCatalogTransport], transport) ) diff --git a/packages/google-cloud-private-catalog/samples/generated_samples/snippet_metadata_google.cloud.privatecatalog.v1beta1.json b/packages/google-cloud-private-catalog/samples/generated_samples/snippet_metadata_google.cloud.privatecatalog.v1beta1.json index abe8479d22b4..cc341df83810 100644 --- a/packages/google-cloud-private-catalog/samples/generated_samples/snippet_metadata_google.cloud.privatecatalog.v1beta1.json +++ b/packages/google-cloud-private-catalog/samples/generated_samples/snippet_metadata_google.cloud.privatecatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-private-catalog", - "version": "0.9.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py b/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py index 6c172e8b4fa5..e858dc325ab6 100644 --- 
a/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py +++ b/packages/google-cloud-private-catalog/tests/unit/gapic/privatecatalog_v1beta1/test_private_catalog.py @@ -1285,22 +1285,23 @@ async def test_search_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.search_catalogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1765,22 +1766,23 @@ async def test_search_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_products - ] = mock_object + ] = mock_rpc request = {} await client.search_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2245,22 +2247,23 @@ async def test_search_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_versions - ] = mock_object + ] = mock_rpc request = {} await client.search_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager/gapic_version.py b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager/gapic_version.py index 0c7cc68730c4..558c8aab67c5 100644 --- a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager/gapic_version.py +++ b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/gapic_version.py b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/gapic_version.py index 0c7cc68730c4..558c8aab67c5 100644 --- a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/gapic_version.py +++ b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/async_client.py b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/async_client.py index d16632fc0d3b..088d702fea61 100644 --- a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/async_client.py +++ b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -230,10 +229,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PrivilegedAccessManagerClient).get_transport_class, - type(PrivilegedAccessManagerClient), - ) + get_transport_class = PrivilegedAccessManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/client.py b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/client.py index 081724888303..8092e7274f66 100644 --- a/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/client.py +++ b/packages/google-cloud-privilegedaccessmanager/google/cloud/privilegedaccessmanager_v1/services/privileged_access_manager/client.py @@ -728,7 +728,7 @@ def __init__( Type[PrivilegedAccessManagerTransport], Callable[..., PrivilegedAccessManagerTransport], ] = ( - type(self).get_transport_class(transport) + PrivilegedAccessManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PrivilegedAccessManagerTransport], transport) ) diff --git a/packages/google-cloud-privilegedaccessmanager/samples/generated_samples/snippet_metadata_google.cloud.privilegedaccessmanager.v1.json b/packages/google-cloud-privilegedaccessmanager/samples/generated_samples/snippet_metadata_google.cloud.privilegedaccessmanager.v1.json index 06e5d57aac66..be6124d09ade 100644 --- a/packages/google-cloud-privilegedaccessmanager/samples/generated_samples/snippet_metadata_google.cloud.privilegedaccessmanager.v1.json +++ b/packages/google-cloud-privilegedaccessmanager/samples/generated_samples/snippet_metadata_google.cloud.privilegedaccessmanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-privilegedaccessmanager", - 
"version": "0.1.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-privilegedaccessmanager/tests/unit/gapic/privilegedaccessmanager_v1/test_privileged_access_manager.py b/packages/google-cloud-privilegedaccessmanager/tests/unit/gapic/privilegedaccessmanager_v1/test_privileged_access_manager.py index 0422ec64ca47..8f3bd6e95563 100644 --- a/packages/google-cloud-privilegedaccessmanager/tests/unit/gapic/privilegedaccessmanager_v1/test_privileged_access_manager.py +++ b/packages/google-cloud-privilegedaccessmanager/tests/unit/gapic/privilegedaccessmanager_v1/test_privileged_access_manager.py @@ -1382,22 +1382,23 @@ async def test_check_onboarding_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_onboarding_status - ] = mock_object + ] = mock_rpc request = {} await client.check_onboarding_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_onboarding_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_list_entitlements_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entitlements - ] = mock_object + ] = mock_rpc request = {} await client.list_entitlements(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entitlements(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2287,22 +2289,23 @@ async def test_search_entitlements_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_entitlements - ] = mock_object + ] = mock_rpc request = {} await client.search_entitlements(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_entitlements(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2786,22 +2789,23 @@ async def test_get_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.get_entitlement(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3113,8 +3117,9 @@ def test_create_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_entitlement(request) @@ -3170,26 +3175,28 @@ async def test_create_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.create_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3522,8 +3529,9 @@ def test_delete_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_entitlement(request) @@ -3579,26 +3587,28 @@ async def test_delete_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.delete_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3905,8 +3915,9 @@ def test_update_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_entitlement(request) @@ -3962,26 +3973,28 @@ async def test_update_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.update_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4357,22 +4370,23 @@ async def test_list_grants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_grants - ] = mock_object + ] = mock_rpc request = {} await client.list_grants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_grants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4922,22 +4936,23 @@ async def test_search_grants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_grants - ] = mock_object + ] = mock_rpc request = {} await client.search_grants(request) # Establish that the underlying gRPC stub method was called. 
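The reworded comment in the operation-method tests above spells out what is being asserted: wrapper_fn is invoked only once, to build the cached client._transport.operations_client, and subsequent calls reuse it. A toy sketch of that lazy-caching behaviour; FakeTransport and wrapper_fn here are illustrative, not the generated transport:

from unittest import mock

# Stand-in for the helper the tests patch; it would normally build an
# operations client for long-running operations.
wrapper_fn = mock.Mock(side_effect=lambda: object())


class FakeTransport:
    def __init__(self):
        self._operations_client = None

    @property
    def operations_client(self):
        # Built lazily on the first access, then cached on the transport.
        if self._operations_client is None:
            self._operations_client = wrapper_fn()
        return self._operations_client


transport = FakeTransport()
first = transport.operations_client   # first RPC path: builds and caches
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
second = transport.operations_client  # later RPC path: reuses the cache
assert wrapper_fn.call_count == 0
assert first is second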
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_grants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5409,22 +5424,23 @@ async def test_get_grant_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_grant - ] = mock_object + ] = mock_rpc request = {} await client.get_grant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_grant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5796,22 +5812,23 @@ async def test_create_grant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_grant - ] = mock_object + ] = mock_rpc request = {} await client.create_grant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_grant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6193,22 +6210,23 @@ async def test_approve_grant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_grant - ] = mock_object + ] = mock_rpc request = {} await client.approve_grant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.approve_grant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6496,22 +6514,23 @@ async def test_deny_grant_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deny_grant - ] = mock_object + ] = mock_rpc request = {} await client.deny_grant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.deny_grant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6733,8 +6752,9 @@ def test_revoke_grant_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.revoke_grant(request) @@ -6788,26 +6808,28 @@ async def test_revoke_grant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revoke_grant - ] = mock_object + ] = mock_rpc request = {} await client.revoke_grant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.revoke_grant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py b/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py index ab68833be4be..558c8aab67c5 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py index ab68833be4be..558c8aab67c5 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/async_client.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/async_client.py index 3a0bcef5c3dd..3952f79569de 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/async_client.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
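The gapic_version.py hunks in this patch (for example the publicca ones just above) reset __version__ to the 0.0.0 placeholder; the trailing # {x-release-please-version} annotation marks the line that the release tooling rewrites with the next published version. A hedged, illustrative updater showing how such an annotated line can be bumped; this is a sketch, not the actual release-please implementation:

import re

MARKER = "{x-release-please-version}"


def bump_version_line(line: str, new_version: str) -> str:
    # Only lines carrying the marker are rewritten; others pass through.
    if MARKER not in line:
        return line
    return re.sub(r'"\d+\.\d+\.\d+"', f'"{new_version}"', line)


line = '__version__ = "0.0.0"  # {x-release-please-version}'
assert bump_version_line(line, "0.3.13") == (
    '__version__ = "0.3.13"  # {x-release-please-version}'
)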
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,10 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PublicCertificateAuthorityServiceClient).get_transport_class, - type(PublicCertificateAuthorityServiceClient), - ) + get_transport_class = PublicCertificateAuthorityServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/client.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/client.py index 07468c257a90..3a25b96642ef 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/client.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1/services/public_certificate_authority_service/client.py @@ -686,7 +686,7 @@ def __init__( Type[PublicCertificateAuthorityServiceTransport], Callable[..., PublicCertificateAuthorityServiceTransport], ] = ( - type(self).get_transport_class(transport) + PublicCertificateAuthorityServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., PublicCertificateAuthorityServiceTransport], transport diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py index ab68833be4be..558c8aab67c5 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/async_client.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/async_client.py index ec87c800157f..4238b0da97aa 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/async_client.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,10 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PublicCertificateAuthorityServiceClient).get_transport_class, - type(PublicCertificateAuthorityServiceClient), - ) + get_transport_class = PublicCertificateAuthorityServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/client.py b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/client.py index c4894d633fd9..ca1aea18af97 100644 --- a/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/client.py +++ b/packages/google-cloud-public-ca/google/cloud/security/publicca_v1beta1/services/public_certificate_authority_service/client.py @@ -686,7 +686,7 @@ def __init__( Type[PublicCertificateAuthorityServiceTransport], Callable[..., PublicCertificateAuthorityServiceTransport], ] = ( - type(self).get_transport_class(transport) + PublicCertificateAuthorityServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., PublicCertificateAuthorityServiceTransport], transport diff --git a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json index 27e5f16b7fa0..3150579e1285 100644 --- a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json +++ b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-security-publicca", - "version": "0.3.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json index 884a3d6b1477..b4590ffb004f 100644 --- a/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json +++ b/packages/google-cloud-public-ca/samples/generated_samples/snippet_metadata_google.cloud.security.publicca.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-public-ca", - "version": "0.3.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1/test_public_certificate_authority_service.py b/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1/test_public_certificate_authority_service.py index b60977e5b3ce..a8149b091358 100644 --- a/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1/test_public_certificate_authority_service.py +++ b/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1/test_public_certificate_authority_service.py @@ -1405,22 +1405,23 @@ async def test_create_external_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ client._client._transport.create_external_account_key - ] = mock_object + ] = mock_rpc request = {} await client.create_external_account_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_external_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1beta1/test_public_certificate_authority_service.py b/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1beta1/test_public_certificate_authority_service.py index 58de9849444e..f40e37aae2df 100644 --- a/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1beta1/test_public_certificate_authority_service.py +++ b/packages/google-cloud-public-ca/tests/unit/gapic/publicca_v1beta1/test_public_certificate_authority_service.py @@ -1405,22 +1405,23 @@ async def test_create_external_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_external_account_key - ] = mock_object + ] = mock_rpc request = {} await client.create_external_account_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_external_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/gapic_version.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/gapic_version.py +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_version.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_version.py +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/async_client.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/async_client.py index 25846448dbe3..865d49bbb495 100644 --- a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/async_client.py +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,10 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RapidMigrationAssessmentClient).get_transport_class, - type(RapidMigrationAssessmentClient), - ) + get_transport_class = RapidMigrationAssessmentClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/client.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/client.py index edbe91f0d11b..bf2954db3df0 100644 --- a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/client.py +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/client.py @@ -705,7 +705,7 @@ def __init__( Type[RapidMigrationAssessmentTransport], Callable[..., RapidMigrationAssessmentTransport], ] = ( - type(self).get_transport_class(transport) + RapidMigrationAssessmentClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RapidMigrationAssessmentTransport], transport) ) diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json index e0b400a0aa1e..c385b94c0c47 100644 --- a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-rapidmigrationassessment", - "version": "0.1.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py index 30edab67d109..18703a1720b3 100644 --- a/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py +++ b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py @@ -1325,8 +1325,9 @@ def 
test_create_collector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_collector(request) @@ -1380,26 +1381,28 @@ async def test_create_collector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_collector - ] = mock_object + ] = mock_rpc request = {} await client.create_collector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_collector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1720,8 +1723,9 @@ def test_create_annotation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_annotation(request) @@ -1777,26 +1781,28 @@ async def test_create_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_annotation - ] = mock_object + ] = mock_rpc request = {} await client.create_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2166,22 +2172,23 @@ async def test_get_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2542,22 +2549,23 @@ async def test_list_collectors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_collectors - ] = mock_object + ] = mock_rpc request = {} await client.list_collectors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_collectors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3130,22 +3138,23 @@ async def test_get_collector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_collector - ] = mock_object + ] = mock_rpc request = {} await client.get_collector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_collector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3459,8 +3468,9 @@ def test_update_collector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_collector(request) @@ -3514,26 +3524,28 @@ async def test_update_collector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_collector - ] = mock_object + ] = mock_rpc request = {} await client.update_collector(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_collector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3838,8 +3850,9 @@ def test_delete_collector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_collector(request) @@ -3893,26 +3906,28 @@ async def test_delete_collector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_collector - ] = mock_object + ] = mock_rpc request = {} await client.delete_collector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_collector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4207,8 +4222,9 @@ def test_resume_collector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resume_collector(request) @@ -4262,26 +4278,28 @@ async def test_resume_collector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_collector - ] = mock_object + ] = mock_rpc request = {} await client.resume_collector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resume_collector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4584,8 +4602,9 @@ def test_register_collector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.register_collector(request) @@ -4641,26 +4660,28 @@ async def test_register_collector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.register_collector - ] = mock_object + ] = mock_rpc request = {} await client.register_collector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.register_collector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4963,8 +4984,9 @@ def test_pause_collector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.pause_collector(request) @@ -5018,26 +5040,28 @@ async def test_pause_collector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_collector - ] = mock_object + ] = mock_rpc request = {} await client.pause_collector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.pause_collector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py index 64606bb368ac..558c8aab67c5 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.21.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py index 64606bb368ac..558c8aab67c5 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.21.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py index 1e52c58b947a..ca4ff9f14e83 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -226,10 +225,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RecaptchaEnterpriseServiceClient).get_transport_class, - type(RecaptchaEnterpriseServiceClient), - ) + get_transport_class = RecaptchaEnterpriseServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py index 1aecd1b6873d..e26f3ad6450c 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py @@ -765,7 +765,7 @@ def __init__( Type[RecaptchaEnterpriseServiceTransport], Callable[..., RecaptchaEnterpriseServiceTransport], ] = ( - type(self).get_transport_class(transport) + RecaptchaEnterpriseServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RecaptchaEnterpriseServiceTransport], transport) ) diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index 0acb5c6006d6..aa554428ea59 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recaptcha-enterprise", - "version": "1.21.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py index 02122f3a7378..2134315d5a12 100644 --- a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py +++ b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py @@ -1329,22 +1329,23 @@ async def test_create_assessment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_assessment - ] = mock_object + ] = mock_rpc request = {} await client.create_assessment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_assessment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1723,22 +1724,23 @@ async def test_annotate_assessment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_assessment - ] = mock_object + ] = mock_rpc request = {} await client.annotate_assessment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.annotate_assessment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2106,22 +2108,23 @@ async def test_create_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key - ] = mock_object + ] = mock_rpc request = {} await client.create_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2482,22 +2485,23 @@ async def test_list_keys_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3053,22 +3057,23 @@ async def test_retrieve_legacy_secret_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_legacy_secret_key - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_legacy_secret_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_legacy_secret_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3429,22 +3434,23 @@ async def test_get_key_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key - ] = mock_object + ] = mock_rpc request = {} await client.get_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3792,22 +3798,23 @@ async def test_update_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_key - ] = mock_object + ] = mock_rpc request = {} await client.update_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4159,22 +4166,23 @@ async def test_delete_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4517,22 +4525,23 @@ async def test_migrate_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.migrate_key - ] = mock_object + ] = mock_rpc request = {} await client.migrate_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.migrate_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4801,22 +4810,23 @@ async def test_get_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metrics - ] = mock_object + ] = mock_rpc request = {} await client.get_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5187,22 +5197,23 @@ async def test_create_firewall_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_firewall_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_firewall_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_firewall_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5593,22 +5604,23 @@ async def test_list_firewall_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_firewall_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_firewall_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_firewall_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6191,22 +6203,23 @@ async def test_get_firewall_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_firewall_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_firewall_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_firewall_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6590,22 +6603,23 @@ async def test_update_firewall_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_firewall_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_firewall_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_firewall_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6987,22 +7001,23 @@ async def test_delete_firewall_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_firewall_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_firewall_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_firewall_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7361,22 +7376,23 @@ async def test_reorder_firewall_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reorder_firewall_policies - ] = mock_object + ] = mock_rpc request = {} await client.reorder_firewall_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reorder_firewall_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7758,22 +7774,23 @@ async def test_list_related_account_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_related_account_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_related_account_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_related_account_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8362,22 +8379,23 @@ async def test_list_related_account_group_memberships_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_related_account_group_memberships - ] = mock_object + ] = mock_rpc request = {} await client.list_related_account_group_memberships(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_related_account_group_memberships(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8980,22 +8998,23 @@ async def test_search_related_account_group_memberships_async_use_cached_wrapped ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_related_account_group_memberships - ] = mock_object + ] = mock_rpc request = {} await client.search_related_account_group_memberships(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_related_account_group_memberships(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine/gapic_version.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine/gapic_version.py index dbb72468cf59..558c8aab67c5 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine/gapic_version.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/gapic_version.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/gapic_version.py index dbb72468cf59..558c8aab67c5 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/gapic_version.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.10.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/async_client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/async_client.py index cb829983497d..f6b05b6cebb8 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/async_client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CatalogServiceClient).get_transport_class, type(CatalogServiceClient) - ) + get_transport_class = CatalogServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py index 591488ecbf49..4af7c9a6985c 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py @@ -695,7 +695,7 @@ def __init__( transport_init: Union[ Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport] ] = ( - type(self).get_transport_class(transport) + CatalogServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CatalogServiceTransport], transport) ) diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/async_client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/async_client.py index d13866375750..b8da065eece4 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/async_client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,10 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionApiKeyRegistryClient).get_transport_class, - type(PredictionApiKeyRegistryClient), - ) + get_transport_class = PredictionApiKeyRegistryClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py index c5798da26c02..33e15df2aaa6 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py @@ -707,7 +707,7 @@ def __init__( Type[PredictionApiKeyRegistryTransport], Callable[..., PredictionApiKeyRegistryTransport], ] = ( - type(self).get_transport_class(transport) + PredictionApiKeyRegistryClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionApiKeyRegistryTransport], transport) ) diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/async_client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/async_client.py index 597f59d2532c..3c0ea1a7de50 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/async_client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py index 8601107c017d..02c01d112842 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py @@ -670,7 +670,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/async_client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/async_client.py index 93102d1d821d..c618593aaac8 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/async_client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient) - ) + get_transport_class = UserEventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py index 1813b2f9ef7a..72bb0ef3c664 100644 --- a/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py +++ b/packages/google-cloud-recommendations-ai/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py @@ -676,7 +676,7 @@ def __init__( Type[UserEventServiceTransport], Callable[..., UserEventServiceTransport], ] = ( - type(self).get_transport_class(transport) + UserEventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserEventServiceTransport], transport) ) diff --git a/packages/google-cloud-recommendations-ai/samples/generated_samples/snippet_metadata_google.cloud.recommendationengine.v1beta1.json b/packages/google-cloud-recommendations-ai/samples/generated_samples/snippet_metadata_google.cloud.recommendationengine.v1beta1.json index f646f8610f51..8cfa197907b0 100644 --- a/packages/google-cloud-recommendations-ai/samples/generated_samples/snippet_metadata_google.cloud.recommendationengine.v1beta1.json +++ b/packages/google-cloud-recommendations-ai/samples/generated_samples/snippet_metadata_google.cloud.recommendationengine.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recommendations-ai", - "version": "0.10.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py index 9eff89c62c11..e8019247639b 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py @@ -1326,22 +1326,23 @@ async def test_create_catalog_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_catalog_item - ] = mock_object + ] = mock_rpc request = {} await client.create_catalog_item(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_catalog_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1734,22 +1735,23 @@ async def test_get_catalog_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_catalog_item - ] = mock_object + ] = mock_rpc request = {} await client.get_catalog_item(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_catalog_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2120,22 +2122,23 @@ async def test_list_catalog_items_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalog_items - ] = mock_object + ] = mock_rpc request = {} await client.list_catalog_items(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalog_items(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2734,22 +2737,23 @@ async def test_update_catalog_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_catalog_item - ] = mock_object + ] = mock_rpc request = {} await client.update_catalog_item(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_catalog_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3140,22 +3144,23 @@ async def test_delete_catalog_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_catalog_item - ] = mock_object + ] = mock_rpc request = {} await client.delete_catalog_item(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_catalog_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3462,8 +3467,9 @@ def test_import_catalog_items_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_catalog_items(request) @@ -3519,26 +3525,28 @@ async def test_import_catalog_items_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_catalog_items - ] = mock_object + ] = mock_rpc request = {} await client.import_catalog_items(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_catalog_items(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py index e44ef6f10127..729389d26682 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py @@ -1390,22 +1390,23 @@ async def test_create_prediction_api_key_registration_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_prediction_api_key_registration - ] = mock_object + ] = mock_rpc request = {} await client.create_prediction_api_key_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_prediction_api_key_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1828,22 +1829,23 @@ async def test_list_prediction_api_key_registrations_async_use_cached_wrapped_rp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_prediction_api_key_registrations - ] = mock_object + ] = mock_rpc request = {} await client.list_prediction_api_key_registrations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_prediction_api_key_registrations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2447,22 +2449,23 @@ async def test_delete_prediction_api_key_registration_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_prediction_api_key_registration - ] = mock_object + ] = mock_rpc request = {} await client.delete_prediction_api_key_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_prediction_api_key_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py index c803bbfe59c6..4d146266b0b3 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py @@ -1337,22 +1337,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py index b19d2b23f606..606f8a8f4131 100644 --- a/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py +++ b/packages/google-cloud-recommendations-ai/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py @@ -1332,22 +1332,23 @@ async def test_write_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_user_event - ] = mock_object + ] = mock_rpc request = {} await client.write_user_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1728,22 +1729,23 @@ async def test_collect_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.collect_user_event - ] = mock_object + ] = mock_rpc request = {} await client.collect_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.collect_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2141,22 +2143,23 @@ async def test_list_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_user_events - ] = mock_object + ] = mock_rpc request = {} await client.list_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2664,8 +2667,9 @@ def test_purge_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_user_events(request) @@ -2721,26 +2725,28 @@ async def test_purge_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_user_events - ] = mock_object + ] = mock_rpc request = {} await client.purge_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3073,8 +3079,9 @@ def test_import_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_user_events(request) @@ -3130,26 +3137,28 @@ async def test_import_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_user_events - ] = mock_object + ] = mock_rpc request = {} await client.import_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py b/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py index 9cec76a3a9c8..558c8aab67c5 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py +++ b/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py index 9cec76a3a9c8..558c8aab67c5 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py index 80c1f7d5f46d..fde02c55493e 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RecommenderClient).get_transport_class, type(RecommenderClient) - ) + get_transport_class = RecommenderClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py index 2c8babd33ba6..75d246c0424e 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py @@ -791,7 +791,7 @@ def __init__( transport_init: Union[ Type[RecommenderTransport], Callable[..., RecommenderTransport] ] = ( - type(self).get_transport_class(transport) + RecommenderClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RecommenderTransport], transport) ) diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py index 9cec76a3a9c8..558c8aab67c5 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/async_client.py b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/async_client.py index 1fdb8d42767f..f7546402b04b 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/async_client.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RecommenderClient).get_transport_class, type(RecommenderClient) - ) + get_transport_class = RecommenderClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/client.py b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/client.py index 8de77513c423..e3ff47a03392 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/client.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/services/recommender/client.py @@ -791,7 +791,7 @@ def __init__( transport_init: Union[ Type[RecommenderTransport], Callable[..., RecommenderTransport] ] = ( - type(self).get_transport_class(transport) + RecommenderClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RecommenderTransport], transport) ) diff --git a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json index 57316030cd19..5c470a773f2a 100644 --- a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json +++ b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recommender", - "version": "2.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json index 7fe60b4959f9..e4ff805e99aa 100644 --- a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json +++ b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recommender", - "version": "2.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py index 2f23b1846be5..b20b6b862d1c 100644 --- a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py +++ b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1/test_recommender.py @@ -1262,22 +1262,23 @@ async def test_list_insights_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_insights - ] = mock_object + ] = mock_rpc request = {} await client.list_insights(request) # Establish that the underlying gRPC stub method was called. 
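Throughout this regeneration each package's gapic_version.py is reset to "0.0.0" and the snippet metadata version to "0.1.0". The trailing {x-release-please-version} marker is what release tooling keys on to stamp the real version back in at release time, so the placeholder values after an OwlBot regeneration are expected rather than a regression. A rough, purely illustrative sketch of marker-driven substitution (my own code, not the release-please implementation):

import re

RELEASED_VERSION = "2.15.6"  # example value, not taken from the diff


def stamp_version(text: str, version: str) -> str:
    # Rewrite any line tagged with the release-please marker comment.
    pattern = re.compile(
        r'^(__version__ = ")[^"]*("  # \{x-release-please-version\})$',
        re.MULTILINE,
    )
    return pattern.sub(rf"\g<1>{version}\g<2>", text)


source = '__version__ = "0.0.0"  # {x-release-please-version}\n'
print(stamp_version(source, RELEASED_VERSION))
# __version__ = "2.15.6"  # {x-release-please-version}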
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_insights(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1839,22 +1840,23 @@ async def test_get_insight_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_insight - ] = mock_object + ] = mock_rpc request = {} await client.get_insight(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_insight(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2244,22 +2246,23 @@ async def test_mark_insight_accepted_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_insight_accepted - ] = mock_object + ] = mock_rpc request = {} await client.mark_insight_accepted(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_insight_accepted(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2663,22 +2666,23 @@ async def test_list_recommendations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recommendations - ] = mock_object + ] = mock_rpc request = {} await client.list_recommendations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recommendations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3277,22 +3281,23 @@ async def test_get_recommendation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recommendation - ] = mock_object + ] = mock_rpc request = {} await client.get_recommendation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recommendation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3692,22 +3697,23 @@ async def test_mark_recommendation_dismissed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_recommendation_dismissed - ] = mock_object + ] = mock_rpc request = {} await client.mark_recommendation_dismissed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_recommendation_dismissed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4021,22 +4027,23 @@ async def test_mark_recommendation_claimed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_recommendation_claimed - ] = mock_object + ] = mock_rpc request = {} await client.mark_recommendation_claimed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_recommendation_claimed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4456,22 +4463,23 @@ async def test_mark_recommendation_succeeded_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_recommendation_succeeded - ] = mock_object + ] = mock_rpc request = {} await client.mark_recommendation_succeeded(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_recommendation_succeeded(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4891,22 +4899,23 @@ async def test_mark_recommendation_failed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_recommendation_failed - ] = mock_object + ] = mock_rpc request = {} await client.mark_recommendation_failed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_recommendation_failed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5318,22 +5327,23 @@ async def test_get_recommender_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recommender_config - ] = mock_object + ] = mock_rpc request = {} await client.get_recommender_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recommender_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5717,22 +5727,23 @@ async def test_update_recommender_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_recommender_config - ] = mock_object + ] = mock_rpc request = {} await client.update_recommender_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_recommender_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6138,22 +6149,23 @@ async def test_get_insight_type_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_insight_type_config - ] = mock_object + ] = mock_rpc request = {} await client.get_insight_type_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_insight_type_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6537,22 +6549,23 @@ async def test_update_insight_type_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_insight_type_config - ] = mock_object + ] = mock_rpc request = {} await client.update_insight_type_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_insight_type_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py index a2eaa4c22216..4ca53c9d5518 100644 --- a/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py +++ b/packages/google-cloud-recommender/tests/unit/gapic/recommender_v1beta1/test_recommender.py @@ -1262,22 +1262,23 @@ async def test_list_insights_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_insights - ] = mock_object + ] = mock_rpc request = {} await client.list_insights(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_insights(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1839,22 +1840,23 @@ async def test_get_insight_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_insight - ] = mock_object + ] = mock_rpc request = {} await client.get_insight(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_insight(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2244,22 +2246,23 @@ async def test_mark_insight_accepted_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_insight_accepted - ] = mock_object + ] = mock_rpc request = {} await client.mark_insight_accepted(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_insight_accepted(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2663,22 +2666,23 @@ async def test_list_recommendations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recommendations - ] = mock_object + ] = mock_rpc request = {} await client.list_recommendations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recommendations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3277,22 +3281,23 @@ async def test_get_recommendation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recommendation - ] = mock_object + ] = mock_rpc request = {} await client.get_recommendation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recommendation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3692,22 +3697,23 @@ async def test_mark_recommendation_claimed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_recommendation_claimed - ] = mock_object + ] = mock_rpc request = {} await client.mark_recommendation_claimed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_recommendation_claimed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4127,22 +4133,23 @@ async def test_mark_recommendation_succeeded_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_recommendation_succeeded - ] = mock_object + ] = mock_rpc request = {} await client.mark_recommendation_succeeded(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_recommendation_succeeded(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4562,22 +4569,23 @@ async def test_mark_recommendation_failed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mark_recommendation_failed - ] = mock_object + ] = mock_rpc request = {} await client.mark_recommendation_failed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mark_recommendation_failed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4989,22 +4997,23 @@ async def test_get_recommender_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recommender_config - ] = mock_object + ] = mock_rpc request = {} await client.get_recommender_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recommender_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5388,22 +5397,23 @@ async def test_update_recommender_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_recommender_config - ] = mock_object + ] = mock_rpc request = {} await client.update_recommender_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_recommender_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5809,22 +5819,23 @@ async def test_get_insight_type_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_insight_type_config - ] = mock_object + ] = mock_rpc request = {} await client.get_insight_type_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_insight_type_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6208,22 +6219,23 @@ async def test_update_insight_type_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_insight_type_config - ] = mock_object + ] = mock_rpc request = {} await client.update_insight_type_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_insight_type_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6617,22 +6629,23 @@ async def test_list_recommenders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recommenders - ] = mock_object + ] = mock_rpc request = {} await client.list_recommenders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recommenders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7052,22 +7065,23 @@ async def test_list_insight_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_insight_types - ] = mock_object + ] = mock_rpc request = {} await client.list_insight_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_insight_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py index 054db1211478..c6c7cfb33a92 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -230,9 +229,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudRedisClusterClient).get_transport_class, type(CloudRedisClusterClient) - ) + get_transport_class = CloudRedisClusterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py index 0c8916fca99c..2fcc7b813675 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/services/cloud_redis_cluster/client.py @@ -724,7 +724,7 @@ def __init__( Type[CloudRedisClusterTransport], Callable[..., CloudRedisClusterTransport], ] = ( - type(self).get_transport_class(transport) + CloudRedisClusterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisClusterTransport], transport) ) diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py index f985b61b8193..7a4f1bbe2268 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -230,9 +229,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudRedisClusterClient).get_transport_class, type(CloudRedisClusterClient) - ) + get_transport_class = CloudRedisClusterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py index ddf4f0342b20..801f259cfd26 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/services/cloud_redis_cluster/client.py @@ -724,7 +724,7 @@ def __init__( Type[CloudRedisClusterTransport], Callable[..., CloudRedisClusterTransport], ] = ( - type(self).get_transport_class(transport) + CloudRedisClusterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisClusterTransport], transport) ) diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json index 109259c52441..b69528fcb001 100644 --- a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis-cluster", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json index f1ebc56d073d..95ce3f58686e 100644 --- a/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json +++ b/packages/google-cloud-redis-cluster/samples/generated_samples/snippet_metadata_google.cloud.redis.cluster.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis-cluster", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py index 781d6bfad2db..3e9e16ee3103 100644 --- a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1/test_cloud_redis_cluster.py @@ -1339,22 +1339,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1936,22 +1937,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2270,8 +2272,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2325,26 +2328,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2647,8 +2652,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
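The reworded comment in these operation-method tests makes the intent explicit: the first LRO call lazily builds client._transport.operations_client via wrapper_fn, and later calls must reuse that cached instance, which is why wrapper_fn.call_count is asserted to stay at 0 after reset_mock(). A minimal sketch of that lazy-caching shape, with hypothetical names:

from unittest import mock


class FakeOperationsTransport:
    def __init__(self, build_operations_client):
        self._build = build_operations_client
        self._operations_client = None

    @property
    def operations_client(self):
        # Built on first use, then cached for every later LRO call.
        if self._operations_client is None:
            self._operations_client = self._build()
        return self._operations_client


builder = mock.Mock(return_value=object())
transport = FakeOperationsTransport(builder)

first = transport.operations_client   # triggers the one-time build
assert builder.call_count == 1

builder.reset_mock()
second = transport.operations_client  # served from the cache
assert builder.call_count == 0
assert first is second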
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -2702,26 +2708,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3016,8 +3024,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -3071,26 +3080,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3470,22 +3481,23 @@ async def test_get_cluster_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster_certificate_authority(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py index 000b83bc63c2..3f57fb453d37 100644 --- a/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/tests/unit/gapic/redis_cluster_v1beta1/test_cloud_redis_cluster.py @@ -1339,22 +1339,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1936,22 +1937,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2270,8 +2272,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2325,26 +2328,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2647,8 +2652,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -2702,26 +2708,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3016,8 +3024,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -3071,26 +3080,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3470,22 +3481,23 @@ async def test_get_cluster_certificate_authority_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster_certificate_authority - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster_certificate_authority(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster_certificate_authority(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-redis/google/cloud/redis/gapic_version.py b/packages/google-cloud-redis/google/cloud/redis/gapic_version.py index 9cec76a3a9c8..558c8aab67c5 100644 --- a/packages/google-cloud-redis/google/cloud/redis/gapic_version.py +++ b/packages/google-cloud-redis/google/cloud/redis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-redis/google/cloud/redis_v1/gapic_version.py b/packages/google-cloud-redis/google/cloud/redis_v1/gapic_version.py index 9cec76a3a9c8..558c8aab67c5 100644 --- a/packages/google-cloud-redis/google/cloud/redis_v1/gapic_version.py +++ b/packages/google-cloud-redis/google/cloud/redis_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index a27c3f5bf6d5..9c787522344f 100644 --- a/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -210,9 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudRedisClient).get_transport_class, type(CloudRedisClient) - ) + get_transport_class = CloudRedisClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/client.py index 0a9b4bc5224b..7ad754255344 100644 --- a/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/google-cloud-redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -684,7 +684,7 @@ def __init__( transport_init: Union[ Type[CloudRedisTransport], Callable[..., CloudRedisTransport] ] = ( - type(self).get_transport_class(transport) + CloudRedisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisTransport], transport) ) diff --git a/packages/google-cloud-redis/google/cloud/redis_v1beta1/gapic_version.py b/packages/google-cloud-redis/google/cloud/redis_v1beta1/gapic_version.py index 9cec76a3a9c8..558c8aab67c5 100644 --- a/packages/google-cloud-redis/google/cloud/redis_v1beta1/gapic_version.py +++ b/packages/google-cloud-redis/google/cloud/redis_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py b/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py index cb4b598121a9..e53aa8945e14 100644 --- a/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py +++ b/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudRedisClient).get_transport_class, type(CloudRedisClient) - ) + get_transport_class = CloudRedisClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/client.py b/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/client.py index 72fbe5790148..bb1eece1dc09 100644 --- a/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/client.py +++ b/packages/google-cloud-redis/google/cloud/redis_v1beta1/services/cloud_redis/client.py @@ -683,7 +683,7 @@ def __init__( transport_init: Union[ Type[CloudRedisTransport], Callable[..., CloudRedisTransport] ] = ( - type(self).get_transport_class(transport) + CloudRedisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisTransport], transport) ) diff --git a/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 9814a29ee508..174d9dff49b1 100644 --- a/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis", - "version": "2.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1beta1.json b/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1beta1.json index 587f2bc2ca04..443ce93aea7f 100644 --- a/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1beta1.json +++ b/packages/google-cloud-redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis", - "version": "2.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 121c29c978e3..c190120c175a 100644 --- a/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/google-cloud-redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -1262,22 +1262,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
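The client.py hunks keep the transport_init dispatch unchanged and only swap type(self) for the concrete client class: a transport passed as a string (or left as None) is resolved through get_transport_class, while anything else is treated as an already-usable transport class or factory. A small stand-in sketch of that dispatch (FakeClient/FakeTransport are assumptions, not the generated types):

from typing import Callable, Optional, Type, Union


class FakeTransport:
    pass


class FakeClient:
    _registry = {"grpc": FakeTransport}

    @classmethod
    def get_transport_class(cls, label: Optional[str] = None) -> Type[FakeTransport]:
        return cls._registry[label or "grpc"]

    def __init__(
        self,
        transport: Optional[
            Union[str, Type[FakeTransport], Callable[..., FakeTransport]]
        ] = None,
    ):
        # Strings and None go through the registry lookup; anything else is
        # assumed to already be a transport class or factory callable.
        transport_init: Union[Type[FakeTransport], Callable[..., FakeTransport]] = (
            FakeClient.get_transport_class(transport)
            if isinstance(transport, str) or transport is None
            else transport
        )
        self._transport = transport_init()


assert isinstance(FakeClient()._transport, FakeTransport)
assert isinstance(FakeClient("grpc")._transport, FakeTransport)
assert isinstance(FakeClient(FakeTransport)._transport, FakeTransport)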
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1914,22 +1915,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2355,22 +2357,23 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_auth_string - ] = mock_object + ] = mock_rpc request = {} await client.get_instance_auth_string(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance_auth_string(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2676,8 +2679,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2731,26 +2735,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3056,8 +3062,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
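Throughout these test hunks, mock_object becomes mock_rpc and the tests now pin mock_rpc.return_value = mock.Mock(). With unittest.mock.AsyncMock, awaiting the mocked call yields return_value, and a bare AsyncMock hands back another async-style child mock by default; pinning the return value to a plain Mock keeps the faked RPC response behaving like an ordinary synchronous object (that reading is inferred from the diff, not stated in it). A self-contained sketch of the difference:

import asyncio
from unittest import mock


async def main() -> None:
    mock_rpc = mock.AsyncMock()

    # Awaiting the mocked RPC yields mock_rpc.return_value; left at its
    # default, that value is itself an async-style child mock, so attribute
    # calls on the "response" (e.g. default_response.result()) would produce
    # further coroutines.
    default_response = await mock_rpc({})
    assert default_response is mock_rpc.return_value

    # Pinning return_value to a plain Mock, as the updated tests do, keeps the
    # awaited response synchronous.
    mock_rpc.return_value = mock.Mock()
    response = await mock_rpc({})
    assert not isinstance(response, mock.AsyncMock)
    assert not asyncio.iscoroutine(response.result())

    # Same call-count bookkeeping the tests rely on.
    assert mock_rpc.call_count == 2


asyncio.run(main())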
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -3111,26 +3118,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3434,8 +3443,9 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance(request) @@ -3489,26 +3499,28 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_instance - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3808,8 +3820,9 @@ def test_import_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_instance(request) @@ -3863,26 +3876,28 @@ async def test_import_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_instance - ] = mock_object + ] = mock_rpc request = {} await client.import_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4194,8 +4209,9 @@ def test_export_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_instance(request) @@ -4249,26 +4265,28 @@ async def test_export_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_instance - ] = mock_object + ] = mock_rpc request = {} await client.export_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4588,8 +4606,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -4645,26 +4664,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4978,8 +4999,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -5033,26 +5055,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5353,8 +5377,9 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reschedule_maintenance(request) @@ -5410,26 +5435,28 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reschedule_maintenance - ] = mock_object + ] = mock_rpc request = {} await client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reschedule_maintenance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py b/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py index 3011c9a70442..649cddc3eb36 100644 --- a/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py +++ b/packages/google-cloud-redis/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py @@ -1262,22 +1262,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1894,22 +1895,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
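The reworded comments in the long-running-operation tests spell out what the surrounding assertions already verify: the first call to an operation method uses the patched wrapper_fn to build client._transport.operations_client, and the second call reuses that cached instance, which is why wrapper_fn.call_count stays at 0 after reset_mock(). A toy version of that build-once-and-cache shape, with a hypothetical _DemoTransport rather than the generated transport:

from unittest import mock


class _DemoTransport:
    """Hypothetical stand-in that builds its operations client lazily."""

    def __init__(self, build_operations_client):
        self._build_operations_client = build_operations_client
        self._operations_client = None

    @property
    def operations_client(self):
        # Built on first access only; later accesses return the cached instance.
        if self._operations_client is None:
            self._operations_client = self._build_operations_client()
        return self._operations_client


builder = mock.Mock(name="wrapper_fn", return_value=object())
transport = _DemoTransport(builder)

first = transport.operations_client   # triggers the single build
second = transport.operations_client  # served from the cache

assert first is second
assert builder.call_count == 1  # mirrors `wrapper_fn.call_count == 0` after reset_mock()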
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2321,22 +2323,23 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_auth_string - ] = mock_object + ] = mock_rpc request = {} await client.get_instance_auth_string(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance_auth_string(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2642,8 +2645,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2697,26 +2701,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3022,8 +3028,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -3077,26 +3084,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3400,8 +3409,9 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance(request) @@ -3455,26 +3465,28 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_instance - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3774,8 +3786,9 @@ def test_import_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_instance(request) @@ -3829,26 +3842,28 @@ async def test_import_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_instance - ] = mock_object + ] = mock_rpc request = {} await client.import_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4160,8 +4175,9 @@ def test_export_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_instance(request) @@ -4215,26 +4231,28 @@ async def test_export_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_instance - ] = mock_object + ] = mock_rpc request = {} await client.export_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4554,8 +4572,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -4611,26 +4630,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4944,8 +4965,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -4999,26 +5021,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5319,8 +5343,9 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reschedule_maintenance(request) @@ -5376,26 +5401,28 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reschedule_maintenance - ] = mock_object + ] = mock_rpc request = {} await client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reschedule_maintenance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager/gapic_version.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager/gapic_version.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/gapic_version.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/gapic_version.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/async_client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/async_client.py index 4c23dfa34a2a..3bd2386561ef 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/async_client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
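The gapic_version.py hunks here (like the google-cloud-redis ones earlier) drop the stamped release number back to the 0.0.0 placeholder, and the snippet metadata falls back to 0.1.0. The trailing # {x-release-please-version} marker is what lets the release automation write the real version back in at release time, so regenerated placeholders in an OwlBot PR are expected rather than a regression. Purely as an illustration of that marker-driven substitution (a sketch, not the actual release-please implementation):

import re

# Illustrative only: rewrite the version on lines carrying the release marker.
_MARKER = re.compile(r'(__version__ = ")[^"]*("\s*#\s*\{x-release-please-version\})')


def stamp_version(source: str, version: str) -> str:
    """Replace the placeholder version wherever the release marker appears."""
    return _MARKER.sub(r"\g<1>" + version + r"\g<2>", source)


placeholder = '__version__ = "0.0.0"  # {x-release-please-version}\n'
stamped = stamp_version(placeholder, "1.12.5")
assert stamped == '__version__ = "1.12.5"  # {x-release-please-version}\n'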
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FoldersClient).get_transport_class, type(FoldersClient) - ) + get_transport_class = FoldersClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/client.py index 4810be07ed6d..ff68a11427df 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/folders/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[FoldersTransport], Callable[..., FoldersTransport] ] = ( - type(self).get_transport_class(transport) + FoldersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FoldersTransport], transport) ) diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/async_client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/async_client.py index cae01ab9a5ab..c0f2b3e3ea9d 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/async_client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(OrganizationsClient).get_transport_class, type(OrganizationsClient) - ) + get_transport_class = OrganizationsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/client.py index 15ebdd40e826..884cca15464b 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/organizations/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[OrganizationsTransport], Callable[..., OrganizationsTransport] ] = ( - type(self).get_transport_class(transport) + OrganizationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OrganizationsTransport], transport) ) diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/async_client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/async_client.py index a953a5c483b4..d158bd99239e 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/async_client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProjectsClient).get_transport_class, type(ProjectsClient) - ) + get_transport_class = ProjectsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/client.py index 45435bcc2b88..7cbbc6758094 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/projects/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[ProjectsTransport], Callable[..., ProjectsTransport] ] = ( - type(self).get_transport_class(transport) + ProjectsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProjectsTransport], transport) ) diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/async_client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/async_client.py index 8f96e3e3242c..2d881fd4dddc 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/async_client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TagBindingsClient).get_transport_class, type(TagBindingsClient) - ) + get_transport_class = TagBindingsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/client.py index 104c074e1ead..6e886169d9d8 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_bindings/client.py @@ -686,7 +686,7 @@ def __init__( transport_init: Union[ Type[TagBindingsTransport], Callable[..., TagBindingsTransport] ] = ( - type(self).get_transport_class(transport) + TagBindingsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TagBindingsTransport], transport) ) diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/async_client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/async_client.py index bcdafcf031f1..d144341f0aa2 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/async_client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TagHoldsClient).get_transport_class, type(TagHoldsClient) - ) + get_transport_class = TagHoldsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/client.py index 94bcd63cb6f1..286b17669188 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_holds/client.py @@ -661,7 +661,7 @@ def __init__( transport_init: Union[ Type[TagHoldsTransport], Callable[..., TagHoldsTransport] ] = ( - type(self).get_transport_class(transport) + TagHoldsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TagHoldsTransport], transport) ) diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/async_client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/async_client.py index 7222c4241d25..80c1781e4e09 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/async_client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TagKeysClient).get_transport_class, type(TagKeysClient) - ) + get_transport_class = TagKeysClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/client.py index 12d17cee43cd..5d4ad39e6289 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_keys/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[TagKeysTransport], Callable[..., TagKeysTransport] ] = ( - type(self).get_transport_class(transport) + TagKeysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TagKeysTransport], transport) ) diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/async_client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/async_client.py index 300a8e28b50f..0c31d4c5426b 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/async_client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TagValuesClient).get_transport_class, type(TagValuesClient) - ) + get_transport_class = TagValuesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/client.py b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/client.py index ee7dd0620dd1..ea695e212a4d 100644 --- a/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/client.py +++ b/packages/google-cloud-resource-manager/google/cloud/resourcemanager_v3/services/tag_values/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[TagValuesTransport], Callable[..., TagValuesTransport] ] = ( - type(self).get_transport_class(transport) + TagValuesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TagValuesTransport], transport) ) diff --git a/packages/google-cloud-resource-manager/samples/generated_samples/snippet_metadata_google.cloud.resourcemanager.v3.json b/packages/google-cloud-resource-manager/samples/generated_samples/snippet_metadata_google.cloud.resourcemanager.v3.json index 3c7808aae55c..f05e3d14504e 100644 --- a/packages/google-cloud-resource-manager/samples/generated_samples/snippet_metadata_google.cloud.resourcemanager.v3.json +++ b/packages/google-cloud-resource-manager/samples/generated_samples/snippet_metadata_google.cloud.resourcemanager.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-resource-manager", - "version": "1.12.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py index 753acdf55b3b..5b1f7b326ad9 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_folders.py @@ -1243,22 +1243,23 @@ async def test_get_folder_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_folder - ] = mock_object + ] = mock_rpc request = {} await client.get_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1613,22 +1614,23 @@ async def test_list_folders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_folders - ] = mock_object + ] = mock_rpc request = {} await client.list_folders(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_folders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2109,22 +2111,23 @@ async def test_search_folders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_folders - ] = mock_object + ] = mock_rpc request = {} await client.search_folders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_folders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2543,8 +2546,9 @@ def test_create_folder_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_folder(request) @@ -2598,26 +2602,28 @@ async def test_create_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_folder - ] = mock_object + ] = mock_rpc request = {} await client.create_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2842,8 +2848,9 @@ def test_update_folder_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_folder(request) @@ -2897,26 +2904,28 @@ async def test_update_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_folder - ] = mock_object + ] = mock_rpc request = {} await client.update_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3218,8 +3227,9 @@ def test_move_folder_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_folder(request) @@ -3273,26 +3283,28 @@ async def test_move_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_folder - ] = mock_object + ] = mock_rpc request = {} await client.move_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3592,8 +3604,9 @@ def test_delete_folder_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_folder(request) @@ -3647,26 +3660,28 @@ async def test_delete_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_folder - ] = mock_object + ] = mock_rpc request = {} await client.delete_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3956,8 +3971,9 @@ def test_undelete_folder_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_folder(request) @@ -4011,26 +4027,28 @@ async def test_undelete_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_folder - ] = mock_object + ] = mock_rpc request = {} await client.undelete_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4379,22 +4397,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4761,22 +4780,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5153,22 +5173,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py index feeecf05e47e..a5914d84168d 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_organizations.py @@ -1299,22 +1299,23 @@ async def test_get_organization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_organization - ] = mock_object + ] = mock_rpc request = {} await client.get_organization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_organization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1683,22 +1684,23 @@ async def test_search_organizations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_organizations - ] = mock_object + ] = mock_rpc request = {} await client.search_organizations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_organizations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2195,22 +2197,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2577,22 +2580,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2969,22 +2973,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py index adbb4db38a9c..56e0807b186b 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_projects.py @@ -1259,22 +1259,23 @@ async def test_get_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project - ] = mock_object + ] = mock_rpc request = {} await client.get_project(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1631,22 +1632,23 @@ async def test_list_projects_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_projects - ] = mock_object + ] = mock_rpc request = {} await client.list_projects(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_projects(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2127,22 +2129,23 @@ async def test_search_projects_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_projects - ] = mock_object + ] = mock_rpc request = {} await client.search_projects(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_projects(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2561,8 +2564,9 @@ def test_create_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_project(request) @@ -2616,26 +2620,28 @@ async def test_create_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_project - ] = mock_object + ] = mock_rpc request = {} await client.create_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2860,8 +2866,9 @@ def test_update_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_project(request) @@ -2915,26 +2922,28 @@ async def test_update_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_project - ] = mock_object + ] = mock_rpc request = {} await client.update_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3236,8 +3245,9 @@ def test_move_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_project(request) @@ -3291,26 +3301,28 @@ async def test_move_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_project - ] = mock_object + ] = mock_rpc request = {} await client.move_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3610,8 +3622,9 @@ def test_delete_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_project(request) @@ -3665,26 +3678,28 @@ async def test_delete_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_project - ] = mock_object + ] = mock_rpc request = {} await client.delete_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3976,8 +3991,9 @@ def test_undelete_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_project(request) @@ -4031,26 +4047,28 @@ async def test_undelete_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_project - ] = mock_object + ] = mock_rpc request = {} await client.undelete_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4399,22 +4417,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
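The reworded comment in these operation-method hunks describes a lazy-caching behavior: the first long-running-operation call invokes wrapper_fn to build an operations client that is then cached on the transport, so resetting the mock and asserting call_count == 0 after a second call shows the cache was reused. A rough sketch of that behavior under assumed names (FakeTransport, a mock wrapper_fn factory), not the generated transport:

# Assumed, simplified shape of the caching the comments describe.
from unittest import mock


class FakeTransport:
    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        # Built once on first access, then cached on the transport.
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(return_value=object())
transport = FakeTransport(wrapper_fn)

first = transport.operations_client   # first RPC path: builds and caches
wrapper_fn.reset_mock()
second = transport.operations_client  # subsequent RPC path: cache hit

assert wrapper_fn.call_count == 0     # no new wrapper was created
assert first is second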
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4781,22 +4800,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5173,22 +5193,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py index 6c8ab46eb70c..24886da37256 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_bindings.py @@ -1270,22 +1270,23 @@ async def test_list_tag_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tag_bindings - ] = mock_object + ] = mock_rpc request = {} await client.list_tag_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tag_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1728,8 +1729,9 @@ def test_create_tag_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tag_binding(request) @@ -1785,26 +1787,28 @@ async def test_create_tag_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_binding - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tag_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2049,8 +2053,9 @@ def test_delete_tag_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_tag_binding(request) @@ -2106,26 +2111,28 @@ async def test_delete_tag_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_binding - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_tag_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2495,22 +2502,23 @@ async def test_list_effective_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_effective_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_effective_tags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_effective_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py index d138529f1176..da9dcfcf669e 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_holds.py @@ -1184,8 +1184,9 @@ def test_create_tag_hold_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tag_hold(request) @@ -1239,26 +1240,28 @@ async def test_create_tag_hold_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_hold - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_hold(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tag_hold(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1558,8 +1561,9 @@ def test_delete_tag_hold_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_tag_hold(request) @@ -1613,26 +1617,28 @@ async def test_delete_tag_hold_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_hold - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_hold(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_tag_hold(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1982,22 +1988,23 @@ async def test_list_tag_holds_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tag_holds - ] = mock_object + ] = mock_rpc request = {} await client.list_tag_holds(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tag_holds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py index 868b6f233a97..dd90971206d4 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_keys.py @@ -1237,22 +1237,23 @@ async def test_list_tag_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tag_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_tag_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tag_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1749,22 +1750,23 @@ async def test_get_tag_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag_key - ] = mock_object + ] = mock_rpc request = {} await client.get_tag_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2152,22 +2154,23 @@ async def test_get_namespaced_tag_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_namespaced_tag_key - ] = mock_object + ] = mock_rpc request = {} await client.get_namespaced_tag_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_namespaced_tag_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2411,8 +2414,9 @@ def test_create_tag_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tag_key(request) @@ -2466,26 +2470,28 @@ async def test_create_tag_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_key - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tag_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2710,8 +2716,9 @@ def test_update_tag_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_tag_key(request) @@ -2765,26 +2772,28 @@ async def test_update_tag_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_key - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_tag_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3086,8 +3095,9 @@ def test_delete_tag_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_tag_key(request) @@ -3141,26 +3151,28 @@ async def test_delete_tag_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_tag_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3509,22 +3521,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3891,22 +3904,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4283,22 +4297,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py index 145d717b009b..588bda204436 100644 --- a/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py +++ b/packages/google-cloud-resource-manager/tests/unit/gapic/resourcemanager_v3/test_tag_values.py @@ -1259,22 +1259,23 @@ async def test_list_tag_values_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tag_values - ] = mock_object + ] = mock_rpc request = {} await client.list_tag_values(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tag_values(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1768,22 +1769,23 @@ async def test_get_tag_value_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag_value - ] = mock_object + ] = mock_rpc request = {} await client.get_tag_value(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2166,22 +2168,23 @@ async def test_get_namespaced_tag_value_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_namespaced_tag_value - ] = mock_object + ] = mock_rpc request = {} await client.get_namespaced_tag_value(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_namespaced_tag_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2426,8 +2429,9 @@ def test_create_tag_value_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tag_value(request) @@ -2481,26 +2485,28 @@ async def test_create_tag_value_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_value - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_value(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tag_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2727,8 +2733,9 @@ def test_update_tag_value_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_tag_value(request) @@ -2782,26 +2789,28 @@ async def test_update_tag_value_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_value - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_value(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_tag_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3105,8 +3114,9 @@ def test_delete_tag_value_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_tag_value(request) @@ -3160,26 +3170,28 @@ async def test_delete_tag_value_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_value - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_value(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_tag_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3528,22 +3540,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3910,22 +3923,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4302,22 +4316,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py index 19ede3d595e9..558c8aab67c5 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py index 19ede3d595e9..558c8aab67c5 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py index 49b5a48febb1..9d55d4cf1b46 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -210,10 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ResourceSettingsServiceClient).get_transport_class, - type(ResourceSettingsServiceClient), - ) + get_transport_class = ResourceSettingsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py index 9aede91377ee..2e92101cb77a 100644 --- a/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py +++ b/packages/google-cloud-resource-settings/google/cloud/resourcesettings_v1/services/resource_settings_service/client.py @@ -683,7 +683,7 @@ def __init__( Type[ResourceSettingsServiceTransport], Callable[..., ResourceSettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ResourceSettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ResourceSettingsServiceTransport], transport) ) diff --git a/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json b/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json index 9c7d097d6272..3465a96568ae 100644 --- a/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json +++ b/packages/google-cloud-resource-settings/samples/generated_samples/snippet_metadata_google.cloud.resourcesettings.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-resource-settings", - "version": "1.9.6" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py b/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py index 4c847c900384..83a05a6001e3 100644 --- a/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py +++ b/packages/google-cloud-resource-settings/tests/unit/gapic/resourcesettings_v1/test_resource_settings_service.py @@ -1355,22 +1355,23 @@ async def test_list_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ 
client._client._transport.list_settings - ] = mock_object + ] = mock_rpc request = {} await client.list_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1916,22 +1917,23 @@ async def test_get_setting_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_setting - ] = mock_object + ] = mock_rpc request = {} await client.get_setting(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_setting(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2281,22 +2283,23 @@ async def test_update_setting_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_setting - ] = mock_object + ] = mock_rpc request = {} await client.update_setting(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_setting(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/google/cloud/retail/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail/gapic_version.py index 64606bb368ac..558c8aab67c5 100644 --- a/packages/google-cloud-retail/google/cloud/retail/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.21.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py index 64606bb368ac..558c8aab67c5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
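The gapic_version.py hunks here and in the surrounding packages reset the stamped release number to the "0.0.0" placeholder on the line annotated with # {x-release-please-version}; release tooling later rewrites that annotated line with the real version. A tiny illustration of rewriting such an annotated line, using a hypothetical stamp_version helper rather than the actual release tooling:

import re

# Hypothetical helper; shown only to illustrate the annotated placeholder line.
PLACEHOLDER_LINE = '__version__ = "0.0.0"  # {x-release-please-version}\n'


def stamp_version(text: str, version: str) -> str:
    """Rewrite any line carrying the x-release-please-version annotation."""
    pattern = r'__version__ = "[^"]*"(\s*# \{x-release-please-version\})'
    return re.sub(pattern, f'__version__ = "{version}"\\1', text)


print(stamp_version(PLACEHOLDER_LINE, "1.9.7"), end="")
# -> __version__ = "1.9.7"  # {x-release-please-version}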
# -__version__ = "1.21.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/async_client.py index 59b8e67f689c..02b773295420 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsServiceClient).get_transport_class, type(AnalyticsServiceClient) - ) + get_transport_class = AnalyticsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/client.py index 6d8ffadabe74..cd921dba6458 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/analytics_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[AnalyticsServiceTransport], Callable[..., AnalyticsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/async_client.py index a8c4eacbfa73..c62160e6d8fb 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
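These async_client.py hunks drop the functools.partial construction in favor of referencing get_transport_class directly on the concrete client class, and the matching client.py hunks switch type(self).get_transport_class(transport) to the same class attribute. The two spellings resolve to the same callable because get_transport_class appears to live on the client's metaclass (its definition is outside this diff), and an attribute defined on a metaclass comes back already bound when looked up on the class. A compact sketch with made-up names (ClientMeta, Client, FakeGrpcTransport) demonstrating the equivalence:

# Made-up names for illustration; the generated ClientMeta is not shown here.
import functools
from collections import OrderedDict


class FakeGrpcTransport:
    pass


class ClientMeta(type):
    _transport_registry = OrderedDict(grpc=FakeGrpcTransport)

    def get_transport_class(cls, label=None):
        # Defined on the metaclass, so Client.get_transport_class is already
        # a method bound to the Client class.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class Client(metaclass=ClientMeta):
    pass


# Old spelling removed by the diff: re-bind the metaclass method explicitly.
old_style = functools.partial(type(Client).get_transport_class, type(Client))

# New spelling added by the diff: the attribute lookup already binds it.
new_style = Client.get_transport_class

assert old_style("grpc") is new_style("grpc") is FakeGrpcTransport
assert old_style() is new_style() is FakeGrpcTransport

Both forms pick the transport out of the same registry, so the simplification changes no behavior; it only removes the redundant partial and the functools import.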
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CatalogServiceClient).get_transport_class, type(CatalogServiceClient) - ) + get_transport_class = CatalogServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/client.py index 284970ae20d0..ae5150664236 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/catalog_service/client.py @@ -735,7 +735,7 @@ def __init__( transport_init: Union[ Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport] ] = ( - type(self).get_transport_class(transport) + CatalogServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CatalogServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/async_client.py index f8d51e750d7d..853078692f73 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) - ) + get_transport_class = CompletionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/client.py index 1121d6a571ab..4627b8d26d06 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/completion_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[CompletionServiceTransport], Callable[..., CompletionServiceTransport], ] = ( - type(self).get_transport_class(transport) + CompletionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/async_client.py index c0e5970158e9..4ada9e12dbaa 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ControlServiceClient).get_transport_class, type(ControlServiceClient) - ) + get_transport_class = ControlServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/client.py index 24ef0d36a660..b28ecd5b81bc 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/control_service/client.py @@ -691,7 +691,7 @@ def __init__( transport_init: Union[ Type[ControlServiceTransport], Callable[..., ControlServiceTransport] ] = ( - type(self).get_transport_class(transport) + ControlServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ControlServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/async_client.py index da08292edccd..a17f311a1ccd 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/client.py index a86e7f73ff66..7b28f3e0a195 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/model_service/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/async_client.py index e776a5cbf7b0..fe2734fe3ff8 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/client.py index 744adda66a05..5bcc4a673241 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/prediction_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py index 8eb583bd01b2..1c37d91651f5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductServiceClient).get_transport_class, type(ProductServiceClient) - ) + get_transport_class = ProductServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py index 76386f0fe624..6f3fda9dd91e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/product_service/client.py @@ -702,7 +702,7 @@ def __init__( transport_init: Union[ Type[ProductServiceTransport], Callable[..., ProductServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProductServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/async_client.py index 9226feb1deb3..c34b072bcd3e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchServiceClient).get_transport_class, type(SearchServiceClient) - ) + get_transport_class = SearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/client.py index f7ea0ee55ec4..e7a64e1747f0 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/search_service/client.py @@ -742,7 +742,7 @@ def __init__( transport_init: Union[ Type[SearchServiceTransport], Callable[..., SearchServiceTransport] ] = ( - type(self).get_transport_class(transport) + SearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/async_client.py index 6f1fc03d17ab..9c0e995923a4 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,10 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServingConfigServiceClient).get_transport_class, - type(ServingConfigServiceClient), - ) + get_transport_class = ServingConfigServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/client.py index a506ed8a69c6..0100c7998a60 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/serving_config_service/client.py @@ -696,7 +696,7 @@ def __init__( Type[ServingConfigServiceTransport], Callable[..., ServingConfigServiceTransport], ] = ( - type(self).get_transport_class(transport) + ServingConfigServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServingConfigServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/async_client.py index 0ef1014b541a..7d3f83d499bc 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient) - ) + get_transport_class = UserEventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/client.py index cb4c2f299019..16706313d944 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/services/user_event_service/client.py @@ -704,7 +704,7 @@ def __init__( Type[UserEventServiceTransport], Callable[..., UserEventServiceTransport], ] = ( - type(self).get_transport_class(transport) + UserEventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserEventServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py index 64606bb368ac..558c8aab67c5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.21.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/async_client.py index 2a924515aa23..fe7fa1bd08ac 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsServiceClient).get_transport_class, type(AnalyticsServiceClient) - ) + get_transport_class = AnalyticsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/client.py index 37db15101097..c0e513732f90 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[AnalyticsServiceTransport], Callable[..., AnalyticsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/async_client.py index 60ff49f4751c..7ffaac9b7e02 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BranchServiceClient).get_transport_class, type(BranchServiceClient) - ) + get_transport_class = BranchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/client.py index d470e68b0e9b..1282ec03d8ae 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/branch_service/client.py @@ -718,7 +718,7 @@ def __init__( transport_init: Union[ Type[BranchServiceTransport], Callable[..., BranchServiceTransport] ] = ( - type(self).get_transport_class(transport) + BranchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BranchServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/async_client.py index 2c395b690d8c..501b14c89a7c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CatalogServiceClient).get_transport_class, type(CatalogServiceClient) - ) + get_transport_class = CatalogServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/client.py index 7cb8398e6d39..90d73619066c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/catalog_service/client.py @@ -735,7 +735,7 @@ def __init__( transport_init: Union[ Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport] ] = ( - type(self).get_transport_class(transport) + CatalogServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CatalogServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/async_client.py index 4dd39b2f09e2..339aa18b1fa1 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) - ) + get_transport_class = CompletionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/client.py index 9b34df8d2367..b3669caf52ff 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/completion_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[CompletionServiceTransport], Callable[..., CompletionServiceTransport], ] = ( - type(self).get_transport_class(transport) + CompletionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/async_client.py index 8805a4f6554a..6a65bdb49195 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ControlServiceClient).get_transport_class, type(ControlServiceClient) - ) + get_transport_class = ControlServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/client.py index b6c036ea8316..2e7fac3245cc 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/control_service/client.py @@ -691,7 +691,7 @@ def __init__( transport_init: Union[ Type[ControlServiceTransport], Callable[..., ControlServiceTransport] ] = ( - type(self).get_transport_class(transport) + ControlServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ControlServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py index a502b766a910..0780fce0cf42 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,10 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MerchantCenterAccountLinkServiceClient).get_transport_class, - type(MerchantCenterAccountLinkServiceClient), - ) + get_transport_class = MerchantCenterAccountLinkServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py index 7b018ce55405..64a8db485c3e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/merchant_center_account_link_service/client.py @@ -714,7 +714,7 @@ def __init__( Type[MerchantCenterAccountLinkServiceTransport], Callable[..., MerchantCenterAccountLinkServiceTransport], ] = ( - type(self).get_transport_class(transport) + MerchantCenterAccountLinkServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., MerchantCenterAccountLinkServiceTransport], transport diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/async_client.py index 038085f56e4b..c24d57a48936 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/async_client.py +++ 
b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/client.py index 7a209a8bd46d..a066be461153 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/model_service/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/async_client.py index 3f5cf43e7324..a9f2b2c6d8cf 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/client.py index 6835ee58dbd3..8006a94c516f 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/prediction_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py index 19dafb1abc6a..59dc8c92da80 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductServiceClient).get_transport_class, type(ProductServiceClient) - ) + get_transport_class = ProductServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py index e179ee47d040..a952d57b1fd8 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/product_service/client.py @@ -702,7 +702,7 @@ def __init__( transport_init: Union[ Type[ProductServiceTransport], Callable[..., ProductServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProductServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/async_client.py index 1755111bee4e..5902a7690593 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProjectServiceClient).get_transport_class, type(ProjectServiceClient) - ) + get_transport_class = ProjectServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/client.py index 00e50534f649..1ede1178105b 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/project_service/client.py @@ -691,7 +691,7 @@ def __init__( transport_init: Union[ Type[ProjectServiceTransport], Callable[..., ProjectServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProjectServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProjectServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/async_client.py index 9dd73a92f796..18558f7ac7bc 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchServiceClient).get_transport_class, type(SearchServiceClient) - ) + get_transport_class = SearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/client.py index c7848dd93ccc..7a75a67d124c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/search_service/client.py @@ -742,7 +742,7 @@ def __init__( transport_init: Union[ Type[SearchServiceTransport], Callable[..., SearchServiceTransport] ] = ( - type(self).get_transport_class(transport) + SearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/async_client.py index 29959459266b..2ceee2b6fbe7 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,10 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServingConfigServiceClient).get_transport_class, - type(ServingConfigServiceClient), - ) + get_transport_class = ServingConfigServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/client.py index 56e83053691e..8279a8ab9073 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/serving_config_service/client.py @@ -696,7 +696,7 @@ def __init__( Type[ServingConfigServiceTransport], Callable[..., ServingConfigServiceTransport], ] = ( - type(self).get_transport_class(transport) + ServingConfigServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServingConfigServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/async_client.py index e90020bd88d1..607b3d2a8c35 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient) - ) + get_transport_class = UserEventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/client.py index b3968830e160..c6c20f65085f 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/user_event_service/client.py @@ -704,7 +704,7 @@ def __init__( Type[UserEventServiceTransport], Callable[..., UserEventServiceTransport], ] = ( - type(self).get_transport_class(transport) + UserEventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserEventServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py index 64606bb368ac..558c8aab67c5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.21.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/async_client.py index 29cf67775313..94be5f129a6e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsServiceClient).get_transport_class, type(AnalyticsServiceClient) - ) + get_transport_class = AnalyticsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/client.py index 347d0a98c37f..1ec516da60bd 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[AnalyticsServiceTransport], Callable[..., AnalyticsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/async_client.py index 7288f202aac9..91fb3daad5f8 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CatalogServiceClient).get_transport_class, type(CatalogServiceClient) - ) + get_transport_class = CatalogServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/client.py index 04375e82d4b0..1277c854bd5f 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/catalog_service/client.py @@ -735,7 +735,7 @@ def __init__( transport_init: Union[ Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport] ] = ( - type(self).get_transport_class(transport) + CatalogServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CatalogServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/async_client.py index 1eb26266bc48..ec78a43d06c7 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) - ) + get_transport_class = CompletionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/client.py index a0a70046e73d..f15050a51ccb 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/completion_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[CompletionServiceTransport], Callable[..., CompletionServiceTransport], ] = ( - type(self).get_transport_class(transport) + CompletionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/async_client.py index 8d8447cb5e89..93bbec183bb9 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ControlServiceClient).get_transport_class, type(ControlServiceClient) - ) + get_transport_class = ControlServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/client.py index a3e9afc9a8ce..baeb546541e5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/control_service/client.py @@ -691,7 +691,7 @@ def __init__( transport_init: Union[ Type[ControlServiceTransport], Callable[..., ControlServiceTransport] ] = ( - type(self).get_transport_class(transport) + ControlServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ControlServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/async_client.py index 153c826adc1e..338ec4548803 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/client.py index 3de871250702..ce03a15c002b 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/model_service/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/async_client.py index 6b08e3a8e493..789d08a4d009 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/client.py index 68384cd3c04b..f36033352de5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/prediction_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py index 1ed1903a7914..9b1c75f0ae6d 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductServiceClient).get_transport_class, type(ProductServiceClient) - ) + get_transport_class = ProductServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py index 0792e0034354..99484158b296 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/product_service/client.py @@ -702,7 +702,7 @@ def __init__( transport_init: Union[ Type[ProductServiceTransport], Callable[..., ProductServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProductServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/async_client.py index 0ff10ea21dbc..49526aa91cd3 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchServiceClient).get_transport_class, type(SearchServiceClient) - ) + get_transport_class = SearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/client.py index eeb53c0cbef6..13aaa2a0b0e8 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/search_service/client.py @@ -742,7 +742,7 @@ def __init__( transport_init: Union[ Type[SearchServiceTransport], Callable[..., SearchServiceTransport] ] = ( - type(self).get_transport_class(transport) + SearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/async_client.py index 4b9df4301c8f..0796bf5e6df0 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,10 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServingConfigServiceClient).get_transport_class, - type(ServingConfigServiceClient), - ) + get_transport_class = ServingConfigServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/client.py index b2f82bf8b577..b9a0d2989f0e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/serving_config_service/client.py @@ -696,7 +696,7 @@ def __init__( Type[ServingConfigServiceTransport], Callable[..., ServingConfigServiceTransport], ] = ( - type(self).get_transport_class(transport) + ServingConfigServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServingConfigServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/async_client.py index 35e2125349c7..eee576ad894a 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/async_client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient) - ) + get_transport_class = UserEventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/client.py index 351bff551767..bdfe04bb7c5b 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/client.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/user_event_service/client.py @@ -704,7 +704,7 @@ def __init__( Type[UserEventServiceTransport], Callable[..., UserEventServiceTransport], ] = ( - type(self).get_transport_class(transport) + UserEventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserEventServiceTransport], transport) ) diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json index c29bdb8d5e28..2660f295abc7 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "1.21.2" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json index 7bbc2595e87f..0005b981abd9 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "1.21.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json index 0f18dc25c0fe..112a4d73f6b4 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "1.21.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_analytics_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_analytics_service.py index 92e91bc2e13a..fff3af8ebdc0 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_analytics_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_analytics_service.py @@ -1278,8 +1278,9 @@ def test_export_analytics_metrics_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_analytics_metrics(request) @@ -1335,26 +1336,28 @@ async def test_export_analytics_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_analytics_metrics - ] = mock_object + ] = mock_rpc request = {} await client.export_analytics_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_analytics_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py index da2aabb7a54e..78d7cc610df2 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_catalog_service.py @@ -1290,22 +1290,23 @@ async def test_list_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.list_catalogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1847,22 +1848,23 @@ async def test_update_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_catalog - ] = mock_object + ] = mock_rpc request = {} await client.update_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2228,22 +2230,23 @@ async def test_set_default_branch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_default_branch - ] = mock_object + ] = mock_rpc request = {} await client.set_default_branch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_default_branch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2609,22 +2612,23 @@ async def test_get_default_branch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_default_branch - ] = mock_object + ] = mock_rpc request = {} await client.get_default_branch(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_default_branch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3029,22 +3033,23 @@ async def test_get_completion_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_completion_config - ] = mock_object + ] = mock_rpc request = {} await client.get_completion_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_completion_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3466,22 +3471,23 @@ async def test_update_completion_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_completion_config - ] = mock_object + ] = mock_rpc request = {} await client.update_completion_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_completion_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3893,22 +3899,23 @@ async def test_get_attributes_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attributes_config - ] = mock_object + ] = mock_rpc request = {} await client.get_attributes_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attributes_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4288,22 +4295,23 @@ async def test_update_attributes_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attributes_config - ] = mock_object + ] = mock_rpc request = {} await client.update_attributes_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attributes_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4697,22 +4705,23 @@ async def test_add_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.add_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5012,22 +5021,23 @@ async def test_remove_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.remove_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5325,22 +5335,23 @@ async def test_replace_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.replace_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py index d71e906c33d6..53343b74e411 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_completion_service.py @@ -1340,22 +1340,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1580,8 +1581,9 @@ def test_import_completion_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_completion_data(request) @@ -1637,26 +1639,28 @@ async def test_import_completion_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_completion_data - ] = mock_object + ] = mock_rpc request = {} await client.import_completion_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_completion_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py index 62f2f825fbd8..f906e10a0e46 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_control_service.py @@ -1310,22 +1310,23 @@ async def test_create_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_control - ] = mock_object + ] = mock_rpc request = {} await client.create_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1709,22 +1710,23 @@ async def test_delete_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_control - ] = mock_object + ] = mock_rpc request = {} await client.delete_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2080,22 +2082,23 @@ async def test_update_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_control - ] = mock_object + ] = mock_rpc request = {} await client.update_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2496,22 +2499,23 @@ async def test_get_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_control - ] = mock_object + ] = mock_rpc request = {} await client.get_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2874,22 +2878,23 @@ async def test_list_controls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_controls - ] = mock_object + ] = mock_rpc request = {} await client.list_controls(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_controls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py index 69e182fb4c5c..cf2f0646164d 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_model_service.py @@ -1218,8 +1218,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -1273,26 +1274,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1679,22 +1682,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2096,22 +2100,23 @@ async def test_pause_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_model - ] = mock_object + ] = mock_rpc request = {} await client.pause_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2513,22 +2518,23 @@ async def test_resume_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_model - ] = mock_object + ] = mock_rpc request = {} await client.resume_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2890,22 +2896,23 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3247,22 +3254,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3834,22 +3842,23 @@ async def test_update_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_model - ] = mock_object + ] = mock_rpc request = {} await client.update_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4172,8 +4181,9 @@ def test_tune_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.tune_model(request) @@ -4225,26 +4235,28 @@ async def test_tune_model_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tune_model - ] = mock_object + ] = mock_rpc request = {} await client.tune_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.tune_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_prediction_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_prediction_service.py index 123b259a478c..49827eacf030 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_prediction_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_prediction_service.py @@ -1339,22 +1339,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py index 4e36774532c4..172a436129c8 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_product_service.py @@ -1353,22 +1353,23 @@ async def test_create_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product - ] = mock_object + ] = mock_rpc request = {} await client.create_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1826,22 +1827,23 @@ async def test_get_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product - ] = mock_object + ] = mock_rpc request = {} await client.get_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2224,22 +2226,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2829,22 +2832,23 @@ async def test_update_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product - ] = mock_object + ] = mock_rpc request = {} await client.update_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3234,22 +3238,23 @@ async def test_delete_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product - ] = mock_object + ] = mock_rpc request = {} await client.delete_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3535,8 +3540,9 @@ def test_purge_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_products(request) @@ -3590,26 +3596,28 @@ async def test_purge_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_products - ] = mock_object + ] = mock_rpc request = {} await client.purge_products(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3821,8 +3829,9 @@ def test_import_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_products(request) @@ -3876,26 +3885,28 @@ async def test_import_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_products - ] = mock_object + ] = mock_rpc request = {} await client.import_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4099,8 +4110,9 @@ def test_set_inventory_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_inventory(request) @@ -4154,26 +4166,28 @@ async def test_set_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_inventory - ] = mock_object + ] = mock_rpc request = {} await client.set_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4486,8 +4500,9 @@ def test_add_fulfillment_places_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_fulfillment_places(request) @@ -4543,26 +4558,28 @@ async def test_add_fulfillment_places_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_fulfillment_places - ] = mock_object + ] = mock_rpc request = {} await client.add_fulfillment_places(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_fulfillment_places(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4876,8 +4893,9 @@ def test_remove_fulfillment_places_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_fulfillment_places(request) @@ -4933,26 +4951,28 @@ async def test_remove_fulfillment_places_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_fulfillment_places - ] = mock_object + ] = mock_rpc request = {} await client.remove_fulfillment_places(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_fulfillment_places(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5264,8 +5284,9 @@ def test_add_local_inventories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_local_inventories(request) @@ -5321,26 +5342,28 @@ async def test_add_local_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_local_inventories - ] = mock_object + ] = mock_rpc request = {} await client.add_local_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_local_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5652,8 +5675,9 @@ def test_remove_local_inventories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_local_inventories(request) @@ -5709,26 +5733,28 @@ async def test_remove_local_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_local_inventories - ] = mock_object + ] = mock_rpc request = {} await client.remove_local_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_local_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py index b56a322c8e9b..55f2965a2e75 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_search_service.py @@ -1308,22 +1308,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py index 856afe5bb400..290585ccfe20 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_serving_config_service.py @@ -1426,22 +1426,23 @@ async def test_create_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.create_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1866,22 +1867,23 @@ async def test_delete_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_serving_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2297,22 +2299,23 @@ async def test_update_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.update_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2789,22 +2792,23 @@ async def test_get_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.get_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3217,22 +3221,23 @@ async def test_list_serving_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_serving_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_serving_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_serving_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3852,22 +3857,23 @@ async def test_add_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_control - ] = mock_object + ] = mock_rpc request = {} await client.add_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4314,22 +4320,23 @@ async def test_remove_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_control - ] = mock_object + ] = mock_rpc request = {} await client.remove_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_user_event_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_user_event_service.py index 8a1f4390fe3e..38ee2d19f24a 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_user_event_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2/test_user_event_service.py @@ -1380,22 +1380,23 @@ async def test_write_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_user_event - ] = mock_object + ] = mock_rpc request = {} await client.write_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1714,22 +1715,23 @@ async def test_collect_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.collect_user_event - ] = mock_object + ] = mock_rpc request = {} await client.collect_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.collect_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1959,8 +1961,9 @@ def test_purge_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_user_events(request) @@ -2016,26 +2019,28 @@ async def test_purge_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_user_events - ] = mock_object + ] = mock_rpc request = {} await client.purge_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2259,8 +2264,9 @@ def test_import_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_user_events(request) @@ -2316,26 +2322,28 @@ async def test_import_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_user_events - ] = mock_object + ] = mock_rpc request = {} await client.import_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2559,8 +2567,9 @@ def test_rejoin_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rejoin_user_events(request) @@ -2616,26 +2625,28 @@ async def test_rejoin_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rejoin_user_events - ] = mock_object + ] = mock_rpc request = {} await client.rejoin_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rejoin_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_analytics_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_analytics_service.py index c9455205c32c..58de4da2a153 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_analytics_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_analytics_service.py @@ -1278,8 +1278,9 @@ def test_export_analytics_metrics_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_analytics_metrics(request) @@ -1335,26 +1336,28 @@ async def test_export_analytics_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_analytics_metrics - ] = mock_object + ] = mock_rpc request = {} await client.export_analytics_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_analytics_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_branch_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_branch_service.py index bb499ec81722..3ffce6e5f89a 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_branch_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_branch_service.py @@ -1274,22 +1274,23 @@ async def test_list_branches_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_branches - ] = mock_object + ] = mock_rpc request = {} await client.list_branches(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_branches(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1639,22 +1640,23 @@ async def test_get_branch_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_branch - ] = mock_object + ] = mock_rpc request = {} await client.get_branch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_branch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py index 91c1351bc5a6..19fa6f73117f 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_catalog_service.py @@ -1290,22 +1290,23 @@ async def test_list_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.list_catalogs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1847,22 +1848,23 @@ async def test_update_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_catalog - ] = mock_object + ] = mock_rpc request = {} await client.update_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2228,22 +2230,23 @@ async def test_set_default_branch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_default_branch - ] = mock_object + ] = mock_rpc request = {} await client.set_default_branch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_default_branch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2609,22 +2612,23 @@ async def test_get_default_branch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_default_branch - ] = mock_object + ] = mock_rpc request = {} await client.get_default_branch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_default_branch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3029,22 +3033,23 @@ async def test_get_completion_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_completion_config - ] = mock_object + ] = mock_rpc request = {} await client.get_completion_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_completion_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3466,22 +3471,23 @@ async def test_update_completion_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_completion_config - ] = mock_object + ] = mock_rpc request = {} await client.update_completion_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_completion_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3893,22 +3899,23 @@ async def test_get_attributes_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attributes_config - ] = mock_object + ] = mock_rpc request = {} await client.get_attributes_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attributes_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4288,22 +4295,23 @@ async def test_update_attributes_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attributes_config - ] = mock_object + ] = mock_rpc request = {} await client.update_attributes_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attributes_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4697,22 +4705,23 @@ async def test_add_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.add_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5012,22 +5021,23 @@ async def test_remove_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.remove_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5322,22 +5332,23 @@ async def test_batch_remove_catalog_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_remove_catalog_attributes - ] = mock_object + ] = mock_rpc request = {} await client.batch_remove_catalog_attributes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_remove_catalog_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5632,22 +5643,23 @@ async def test_replace_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.replace_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_completion_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_completion_service.py index 3dc41c345907..465406c68571 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_completion_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_completion_service.py @@ -1340,22 +1340,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1580,8 +1581,9 @@ def test_import_completion_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_completion_data(request) @@ -1637,26 +1639,28 @@ async def test_import_completion_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_completion_data - ] = mock_object + ] = mock_rpc request = {} await client.import_completion_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_completion_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py index 7a807f3804b7..97e2acb574dd 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_control_service.py @@ -1310,22 +1310,23 @@ async def test_create_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_control - ] = mock_object + ] = mock_rpc request = {} await client.create_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1733,22 +1734,23 @@ async def test_delete_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_control - ] = mock_object + ] = mock_rpc request = {} await client.delete_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2104,22 +2106,23 @@ async def test_update_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_control - ] = mock_object + ] = mock_rpc request = {} await client.update_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2544,22 +2547,23 @@ async def test_get_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_control - ] = mock_object + ] = mock_rpc request = {} await client.get_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2922,22 +2926,23 @@ async def test_list_controls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_controls - ] = mock_object + ] = mock_rpc request = {} await client.list_controls(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_controls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py index 91f7fae5366f..9cddf03dada5 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_merchant_center_account_link_service.py @@ -1430,22 +1430,23 @@ async def test_list_merchant_center_account_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_merchant_center_account_links - ] = mock_object + ] = mock_rpc request = {} await client.list_merchant_center_account_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_merchant_center_account_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1781,8 +1782,9 @@ def test_create_merchant_center_account_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_merchant_center_account_link(request) @@ -1841,26 +1843,28 @@ async def test_create_merchant_center_account_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_merchant_center_account_link - ] = mock_object + ] = mock_rpc request = {} await client.create_merchant_center_account_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_merchant_center_account_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2263,22 +2267,23 @@ async def test_delete_merchant_center_account_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_merchant_center_account_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_merchant_center_account_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_merchant_center_account_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py index ac46bea97dbb..aaae111e7abc 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_model_service.py @@ -1218,8 +1218,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -1273,26 +1274,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1703,22 +1706,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2120,22 +2124,23 @@ async def test_pause_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_model - ] = mock_object + ] = mock_rpc request = {} await client.pause_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2537,22 +2542,23 @@ async def test_resume_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_model - ] = mock_object + ] = mock_rpc request = {} await client.resume_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2914,22 +2920,23 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3271,22 +3278,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3858,22 +3866,23 @@ async def test_update_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_model - ] = mock_object + ] = mock_rpc request = {} await client.update_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4220,8 +4229,9 @@ def test_tune_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.tune_model(request) @@ -4273,26 +4283,28 @@ async def test_tune_model_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tune_model - ] = mock_object + ] = mock_rpc request = {} await client.tune_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.tune_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_prediction_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_prediction_service.py index 5f0ec7929b45..68f22306b4ea 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_prediction_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_prediction_service.py @@ -1339,22 +1339,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py index 580ea2330f7a..ce665974c733 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_product_service.py @@ -1353,22 +1353,23 @@ async def test_create_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product - ] = mock_object + ] = mock_rpc request = {} await client.create_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1826,22 +1827,23 @@ async def test_get_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product - ] = mock_object + ] = mock_rpc request = {} await client.get_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2227,22 +2229,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2834,22 +2837,23 @@ async def test_update_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product - ] = mock_object + ] = mock_rpc request = {} await client.update_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3239,22 +3243,23 @@ async def test_delete_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product - ] = mock_object + ] = mock_rpc request = {} await client.delete_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3540,8 +3545,9 @@ def test_purge_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_products(request) @@ -3595,26 +3601,28 @@ async def test_purge_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_products - ] = mock_object + ] = mock_rpc request = {} await client.purge_products(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3826,8 +3834,9 @@ def test_import_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_products(request) @@ -3881,26 +3890,28 @@ async def test_import_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_products - ] = mock_object + ] = mock_rpc request = {} await client.import_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4104,8 +4115,9 @@ def test_set_inventory_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_inventory(request) @@ -4159,26 +4171,28 @@ async def test_set_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_inventory - ] = mock_object + ] = mock_rpc request = {} await client.set_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4491,8 +4505,9 @@ def test_add_fulfillment_places_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_fulfillment_places(request) @@ -4548,26 +4563,28 @@ async def test_add_fulfillment_places_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_fulfillment_places - ] = mock_object + ] = mock_rpc request = {} await client.add_fulfillment_places(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_fulfillment_places(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4881,8 +4898,9 @@ def test_remove_fulfillment_places_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_fulfillment_places(request) @@ -4938,26 +4956,28 @@ async def test_remove_fulfillment_places_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_fulfillment_places - ] = mock_object + ] = mock_rpc request = {} await client.remove_fulfillment_places(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_fulfillment_places(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5269,8 +5289,9 @@ def test_add_local_inventories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_local_inventories(request) @@ -5326,26 +5347,28 @@ async def test_add_local_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_local_inventories - ] = mock_object + ] = mock_rpc request = {} await client.add_local_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_local_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5657,8 +5680,9 @@ def test_remove_local_inventories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_local_inventories(request) @@ -5714,26 +5738,28 @@ async def test_remove_local_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_local_inventories - ] = mock_object + ] = mock_rpc request = {} await client.remove_local_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_local_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_project_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_project_service.py index 0a0bb8cfae47..5d8988c08c6f 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_project_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_project_service.py @@ -1300,22 +1300,23 @@ async def test_get_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project - ] = mock_object + ] = mock_rpc request = {} await client.get_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1669,22 +1670,23 @@ async def test_accept_terms_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.accept_terms - ] = mock_object + ] = mock_rpc request = {} await client.accept_terms(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.accept_terms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1977,8 +1979,9 @@ def test_enroll_solution_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enroll_solution(request) @@ -2032,26 +2035,28 @@ async def test_enroll_solution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enroll_solution - ] = mock_object + ] = mock_rpc request = {} await client.enroll_solution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enroll_solution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2330,22 +2335,23 @@ async def test_list_enrolled_solutions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_enrolled_solutions - ] = mock_object + ] = mock_rpc request = {} await client.list_enrolled_solutions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_enrolled_solutions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2719,22 +2725,23 @@ async def test_get_logging_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_logging_config - ] = mock_object + ] = mock_rpc request = {} await client.get_logging_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3103,22 +3110,23 @@ async def test_update_logging_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_logging_config - ] = mock_object + ] = mock_rpc request = {} await client.update_logging_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_logging_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3490,22 +3498,23 @@ async def test_get_alert_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_alert_config - ] = mock_object + ] = mock_rpc request = {} await client.get_alert_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_alert_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3858,22 +3867,23 @@ async def test_update_alert_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_alert_config - ] = mock_object + ] = mock_rpc request = {} await client.update_alert_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_alert_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py index 89d3fb8a15b8..47e49cf2aaac 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_search_service.py @@ -1308,22 +1308,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py index 1c7200afb2bd..1aeaecb11264 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_serving_config_service.py @@ -1426,22 +1426,23 @@ async def test_create_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.create_serving_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1866,22 +1867,23 @@ async def test_delete_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2297,22 +2299,23 @@ async def test_update_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.update_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2789,22 +2792,23 @@ async def test_get_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.get_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3217,22 +3221,23 @@ async def test_list_serving_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_serving_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_serving_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_serving_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3852,22 +3857,23 @@ async def test_add_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_control - ] = mock_object + ] = mock_rpc request = {} await client.add_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4314,22 +4320,23 @@ async def test_remove_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_control - ] = mock_object + ] = mock_rpc request = {} await client.remove_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_user_event_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_user_event_service.py index 5e97d32391f6..087c243f3357 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_user_event_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_user_event_service.py @@ -1380,22 +1380,23 @@ async def test_write_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_user_event - ] = mock_object + ] = mock_rpc request = {} await client.write_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1714,22 +1715,23 @@ async def test_collect_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.collect_user_event - ] = mock_object + ] = mock_rpc request = {} await client.collect_user_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.collect_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1959,8 +1961,9 @@ def test_purge_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_user_events(request) @@ -2016,26 +2019,28 @@ async def test_purge_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_user_events - ] = mock_object + ] = mock_rpc request = {} await client.purge_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2259,8 +2264,9 @@ def test_import_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_user_events(request) @@ -2316,26 +2322,28 @@ async def test_import_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_user_events - ] = mock_object + ] = mock_rpc request = {} await client.import_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2559,8 +2567,9 @@ def test_rejoin_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rejoin_user_events(request) @@ -2616,26 +2625,28 @@ async def test_rejoin_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rejoin_user_events - ] = mock_object + ] = mock_rpc request = {} await client.rejoin_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rejoin_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_analytics_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_analytics_service.py index 4f739fe4e174..323aa5eca1eb 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_analytics_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_analytics_service.py @@ -1278,8 +1278,9 @@ def test_export_analytics_metrics_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_analytics_metrics(request) @@ -1335,26 +1336,28 @@ async def test_export_analytics_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_analytics_metrics - ] = mock_object + ] = mock_rpc request = {} await client.export_analytics_metrics(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_analytics_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py index 9bbf805715ec..832c0e388794 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_catalog_service.py @@ -1290,22 +1290,23 @@ async def test_list_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.list_catalogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1847,22 +1848,23 @@ async def test_update_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_catalog - ] = mock_object + ] = mock_rpc request = {} await client.update_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2228,22 +2230,23 @@ async def test_set_default_branch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_default_branch - ] = mock_object + ] = mock_rpc request = {} await client.set_default_branch(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_default_branch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2609,22 +2612,23 @@ async def test_get_default_branch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_default_branch - ] = mock_object + ] = mock_rpc request = {} await client.get_default_branch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_default_branch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3029,22 +3033,23 @@ async def test_get_completion_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_completion_config - ] = mock_object + ] = mock_rpc request = {} await client.get_completion_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_completion_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3466,22 +3471,23 @@ async def test_update_completion_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_completion_config - ] = mock_object + ] = mock_rpc request = {} await client.update_completion_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_completion_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3893,22 +3899,23 @@ async def test_get_attributes_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attributes_config - ] = mock_object + ] = mock_rpc request = {} await client.get_attributes_config(request) # Establish that the underlying gRPC stub method was called. 
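All of these use_cached_wrapped_rpc tests lean on the same transport detail: the client keeps one wrapped callable per RPC in _wrapped_methods and looks it up on every call, so replacing that dictionary entry with a mock lets the test count invocations without a real gRPC channel. A hedged, self-contained sketch of that idea, using invented DummyTransport and DummyClient classes rather than the real generated ones:

from unittest import mock


class DummyTransport:
    """Stand-in for a GAPIC transport: one cached wrapper per RPC."""

    def __init__(self):
        # Built once at construction time and reused for every call.
        self._wrapped_methods = {"get_thing": lambda request: {"echo": request}}


class DummyClient:
    def __init__(self):
        self._transport = DummyTransport()

    def get_thing(self, request):
        # Every call dispatches through the cached wrapper; no new wrapper is built.
        return self._transport._wrapped_methods["get_thing"](request)


client = DummyClient()

# Swap the cached wrapper for a mock, the same move the generated tests make
# with client._client._transport._wrapped_methods[...] = mock_rpc.
mock_rpc = mock.Mock(return_value={"echo": {}})
client._transport._wrapped_methods["get_thing"] = mock_rpc

client.get_thing({})
assert mock_rpc.call_count == 1

client.get_thing({})
# Two client calls, two hits on the same cached wrapper.
assert mock_rpc.call_count == 2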
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attributes_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4288,22 +4295,23 @@ async def test_update_attributes_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attributes_config - ] = mock_object + ] = mock_rpc request = {} await client.update_attributes_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attributes_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4697,22 +4705,23 @@ async def test_add_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.add_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5012,22 +5021,23 @@ async def test_remove_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.remove_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5322,22 +5332,23 @@ async def test_batch_remove_catalog_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_remove_catalog_attributes - ] = mock_object + ] = mock_rpc request = {} await client.batch_remove_catalog_attributes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_remove_catalog_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5632,22 +5643,23 @@ async def test_replace_catalog_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_catalog_attribute - ] = mock_object + ] = mock_rpc request = {} await client.replace_catalog_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_catalog_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py index 8844894fa997..ff5a37bfc789 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_completion_service.py @@ -1340,22 +1340,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1580,8 +1581,9 @@ def test_import_completion_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_completion_data(request) @@ -1637,26 +1639,28 @@ async def test_import_completion_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_completion_data - ] = mock_object + ] = mock_rpc request = {} await client.import_completion_data(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_completion_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py index da0a9d08b05a..61304c7c2765 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_control_service.py @@ -1310,22 +1310,23 @@ async def test_create_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_control - ] = mock_object + ] = mock_rpc request = {} await client.create_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1733,22 +1734,23 @@ async def test_delete_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_control - ] = mock_object + ] = mock_rpc request = {} await client.delete_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2104,22 +2106,23 @@ async def test_update_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_control - ] = mock_object + ] = mock_rpc request = {} await client.update_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2544,22 +2547,23 @@ async def test_get_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_control - ] = mock_object + ] = mock_rpc request = {} await client.get_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2922,22 +2926,23 @@ async def test_list_controls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_controls - ] = mock_object + ] = mock_rpc request = {} await client.list_controls(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_controls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py index 192e90fbb359..1e4d62457096 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_model_service.py @@ -1218,8 +1218,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -1273,26 +1274,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1679,22 +1682,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2096,22 +2100,23 @@ async def test_pause_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_model - ] = mock_object + ] = mock_rpc request = {} await client.pause_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2513,22 +2518,23 @@ async def test_resume_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_model - ] = mock_object + ] = mock_rpc request = {} await client.resume_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2890,22 +2896,23 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3247,22 +3254,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3834,22 +3842,23 @@ async def test_update_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_model - ] = mock_object + ] = mock_rpc request = {} await client.update_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4172,8 +4181,9 @@ def test_tune_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.tune_model(request) @@ -4225,26 +4235,28 @@ async def test_tune_model_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tune_model - ] = mock_object + ] = mock_rpc request = {} await client.tune_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.tune_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_prediction_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_prediction_service.py index fd1fa869d957..46fd33cf1d37 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_prediction_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_prediction_service.py @@ -1339,22 +1339,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py index 5ca1f72b8e02..179b69104960 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_product_service.py @@ -1353,22 +1353,23 @@ async def test_create_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product - ] = mock_object + ] = mock_rpc request = {} await client.create_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1826,22 +1827,23 @@ async def test_get_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product - ] = mock_object + ] = mock_rpc request = {} await client.get_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2224,22 +2226,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2829,22 +2832,23 @@ async def test_update_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product - ] = mock_object + ] = mock_rpc request = {} await client.update_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3234,22 +3238,23 @@ async def test_delete_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product - ] = mock_object + ] = mock_rpc request = {} await client.delete_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3535,8 +3540,9 @@ def test_purge_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_products(request) @@ -3590,26 +3596,28 @@ async def test_purge_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_products - ] = mock_object + ] = mock_rpc request = {} await client.purge_products(request) # Establish that the underlying gRPC stub method was called. 
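The reworded comments in the operation-method tests ("Operation methods call wrapper_fn to build a cached client._transport.operations_client instance on first rpc call. Subsequent calls should use the cached wrapper") describe a lazy build-once-then-reuse step. The sketch below imitates that behaviour with an invented LazyOperationsTransport; it illustrates the pattern the tests assert (wrapper_fn invoked once, then never again), not the library's actual transport code:

from unittest import mock

# wrapper_fn plays the role the reworded comments give it: a factory invoked
# once, on the first long-running call, to build the operations client.
wrapper_fn = mock.Mock(side_effect=lambda: object())


class LazyOperationsTransport:
    """Toy transport that builds its operations client lazily and caches it."""

    def __init__(self):
        self._operations_client = None

    @property
    def operations_client(self):
        if self._operations_client is None:
            # First LRO call: build and cache.
            self._operations_client = wrapper_fn()
        return self._operations_client


transport = LazyOperationsTransport()

transport.operations_client          # first call builds the client
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
transport.operations_client          # later calls reuse the cached instance
assert wrapper_fn.call_count == 0    # mirrors the tests' wrapper_fn assertions

The reset_mock() followed by a zero call count is the same shape as the generated tests: it proves the second RPC did not rebuild the wrapper.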
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3821,8 +3829,9 @@ def test_import_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_products(request) @@ -3876,26 +3885,28 @@ async def test_import_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_products - ] = mock_object + ] = mock_rpc request = {} await client.import_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4099,8 +4110,9 @@ def test_set_inventory_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_inventory(request) @@ -4154,26 +4166,28 @@ async def test_set_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_inventory - ] = mock_object + ] = mock_rpc request = {} await client.set_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4486,8 +4500,9 @@ def test_add_fulfillment_places_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_fulfillment_places(request) @@ -4543,26 +4558,28 @@ async def test_add_fulfillment_places_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_fulfillment_places - ] = mock_object + ] = mock_rpc request = {} await client.add_fulfillment_places(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_fulfillment_places(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4876,8 +4893,9 @@ def test_remove_fulfillment_places_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_fulfillment_places(request) @@ -4933,26 +4951,28 @@ async def test_remove_fulfillment_places_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_fulfillment_places - ] = mock_object + ] = mock_rpc request = {} await client.remove_fulfillment_places(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_fulfillment_places(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5264,8 +5284,9 @@ def test_add_local_inventories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_local_inventories(request) @@ -5321,26 +5342,28 @@ async def test_add_local_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_local_inventories - ] = mock_object + ] = mock_rpc request = {} await client.add_local_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_local_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5652,8 +5675,9 @@ def test_remove_local_inventories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_local_inventories(request) @@ -5709,26 +5733,28 @@ async def test_remove_local_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_local_inventories - ] = mock_object + ] = mock_rpc request = {} await client.remove_local_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_local_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py index 9a9135e80660..b14a90df168a 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_search_service.py @@ -1308,22 +1308,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py index 6b4dd11a585b..a22e87d7c8ce 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_serving_config_service.py @@ -1426,22 +1426,23 @@ async def test_create_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.create_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1866,22 +1867,23 @@ async def test_delete_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_serving_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2297,22 +2299,23 @@ async def test_update_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.update_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2789,22 +2792,23 @@ async def test_get_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.get_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3217,22 +3221,23 @@ async def test_list_serving_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_serving_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_serving_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_serving_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3852,22 +3857,23 @@ async def test_add_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_control - ] = mock_object + ] = mock_rpc request = {} await client.add_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4314,22 +4320,23 @@ async def test_remove_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_control - ] = mock_object + ] = mock_rpc request = {} await client.remove_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_user_event_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_user_event_service.py index 39a6db253dcb..bda4d65dae28 100644 --- a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_user_event_service.py +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_user_event_service.py @@ -1380,22 +1380,23 @@ async def test_write_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_user_event - ] = mock_object + ] = mock_rpc request = {} await client.write_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1714,22 +1715,23 @@ async def test_collect_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.collect_user_event - ] = mock_object + ] = mock_rpc request = {} await client.collect_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.collect_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1959,8 +1961,9 @@ def test_purge_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_user_events(request) @@ -2016,26 +2019,28 @@ async def test_purge_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_user_events - ] = mock_object + ] = mock_rpc request = {} await client.purge_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2259,8 +2264,9 @@ def test_import_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_user_events(request) @@ -2316,26 +2322,28 @@ async def test_import_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_user_events - ] = mock_object + ] = mock_rpc request = {} await client.import_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2559,8 +2567,9 @@ def test_rejoin_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rejoin_user_events(request) @@ -2616,26 +2625,28 @@ async def test_rejoin_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rejoin_user_events - ] = mock_object + ] = mock_rpc request = {} await client.rejoin_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rejoin_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-run/google/cloud/run/gapic_version.py b/packages/google-cloud-run/google/cloud/run/gapic_version.py index 85323a95dcaf..558c8aab67c5 100644 --- a/packages/google-cloud-run/google/cloud/run/gapic_version.py +++ b/packages/google-cloud-run/google/cloud/run/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-run/google/cloud/run_v2/gapic_version.py b/packages/google-cloud-run/google/cloud/run_v2/gapic_version.py index 85323a95dcaf..558c8aab67c5 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/gapic_version.py +++ b/packages/google-cloud-run/google/cloud/run_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/executions/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/executions/async_client.py index 818c9d8c75a0..44bc25179902 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/executions/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/executions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ExecutionsClient).get_transport_class, type(ExecutionsClient) - ) + get_transport_class = ExecutionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/executions/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/executions/client.py index ed87d49c3c96..a714f76bdc8d 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/executions/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/executions/client.py @@ -771,7 +771,7 @@ def __init__( transport_init: Union[ Type[ExecutionsTransport], Callable[..., ExecutionsTransport] ] = ( - type(self).get_transport_class(transport) + ExecutionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ExecutionsTransport], transport) ) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py index 47cc61c3e1bf..931b728a5107 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(JobsClient).get_transport_class, type(JobsClient) - ) + get_transport_class = JobsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py index f50fd9d1d220..90ccfba0b0a8 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py @@ -774,7 +774,7 @@ def __init__( ) transport_init: Union[Type[JobsTransport], Callable[..., JobsTransport]] = ( - type(self).get_transport_class(transport) + JobsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., JobsTransport], transport) ) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py index d3ae7b361e97..4862a5b8fd37 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RevisionsClient).get_transport_class, type(RevisionsClient) - ) + get_transport_class = RevisionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py index 6378ffbb0e83..fe0b0250af38 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py @@ -778,7 +778,7 @@ def __init__( transport_init: Union[ Type[RevisionsTransport], Callable[..., RevisionsTransport] ] = ( - type(self).get_transport_class(transport) + RevisionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RevisionsTransport], transport) ) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py index 9a1f1ccc7049..57ec39c67773 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServicesClient).get_transport_class, type(ServicesClient) - ) + get_transport_class = ServicesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py index 99ce77de76cc..b9909a3d3f08 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py @@ -777,7 +777,7 @@ def __init__( transport_init: Union[ Type[ServicesTransport], Callable[..., ServicesTransport] ] = ( - type(self).get_transport_class(transport) + ServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServicesTransport], transport) ) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/tasks/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/tasks/async_client.py index 8fbce60cd396..381fe4df1417 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/tasks/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/tasks/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
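# A compact sketch of the simplification made in the async_client.py hunks above,
# using a stand-in _Client/_ClientMeta pair (illustrative names, not the generated
# run_v2 clients). Here get_transport_class lives on the client's metaclass, so the
# old functools.partial spelling and the plain Client.get_transport_class reference
# resolve the same transport.
import functools

class _ClientMeta(type):
    _transport_registry = {"grpc": "GrpcTransport", "grpc_asyncio": "AsyncioTransport"}

    def get_transport_class(cls, label=None):
        # Return the registered transport for the label, or the first one by default.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))

class _Client(metaclass=_ClientMeta):
    pass

old_style = functools.partial(type(_Client).get_transport_class, type(_Client))
new_style = _Client.get_transport_class

assert old_style("grpc_asyncio") == new_style("grpc_asyncio") == "AsyncioTransport"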
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TasksClient).get_transport_class, type(TasksClient) - ) + get_transport_class = TasksClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/tasks/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/tasks/client.py index e5d2f16b433e..819044044ce2 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/tasks/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/tasks/client.py @@ -795,7 +795,7 @@ def __init__( transport_init: Union[ Type[TasksTransport], Callable[..., TasksTransport] ] = ( - type(self).get_transport_class(transport) + TasksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TasksTransport], transport) ) diff --git a/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json b/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json index 8999bceeff78..ae607aeec9de 100644 --- a/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json +++ b/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-run", - "version": "0.10.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py index bb903bed6a9f..1d10325f99e9 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_executions.py @@ -1301,22 +1301,23 @@ async def test_get_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_execution - ] = mock_object + ] = mock_rpc request = {} await client.get_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1695,22 +1696,23 @@ async def test_list_executions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_executions - ] = mock_object + ] = mock_rpc request = {} await client.list_executions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_executions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2201,8 +2203,9 @@ def test_delete_execution_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_execution(request) @@ -2256,26 +2259,28 @@ async def test_delete_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_execution - ] = mock_object + ] = mock_rpc request = {} await client.delete_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2569,8 +2574,9 @@ def test_cancel_execution_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.cancel_execution(request) @@ -2624,26 +2630,28 @@ async def test_cancel_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_execution - ] = mock_object + ] = mock_rpc request = {} await client.cancel_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.cancel_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py index 8a9ae4186b31..11755c4fb3c4 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py @@ -1162,8 +1162,9 @@ def test_create_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_job(request) @@ -1215,26 +1216,28 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1600,22 +1603,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1951,22 +1955,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2409,8 +2414,9 @@ def test_update_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_job(request) @@ -2462,26 +2468,28 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2738,8 +2746,9 @@ def test_delete_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_job(request) @@ -2791,26 +2800,28 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3067,8 +3078,9 @@ def test_run_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_job(request) @@ -3120,26 +3132,28 @@ async def test_run_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_job - ] = mock_object + ] = mock_rpc request = {} await client.run_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3453,22 +3467,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3755,22 +3770,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4067,22 +4083,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py index a605130a4550..e01cb1c29f93 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py @@ -1307,22 +1307,23 @@ async def test_get_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_revision - ] = mock_object + ] = mock_rpc request = {} await client.get_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1672,22 +1673,23 @@ async def test_list_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2138,8 +2140,9 @@ def test_delete_revision_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_revision(request) @@ -2193,26 +2196,28 @@ async def test_delete_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_revision - ] = mock_object + ] = mock_rpc request = {} await client.delete_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py index e077584539ed..af0363c22fdb 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py @@ -1193,8 +1193,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -1248,26 +1249,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1655,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2017,22 +2021,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2477,8 +2482,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2532,26 +2538,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2818,8 +2826,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -2873,26 +2882,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3206,22 +3217,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3508,22 +3520,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3820,22 +3833,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py index 1574b3b92ecc..076f21f71989 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_tasks.py @@ -1259,22 +1259,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1653,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler/gapic_version.py b/packages/google-cloud-scheduler/google/cloud/scheduler/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-scheduler/google/cloud/scheduler/gapic_version.py +++ b/packages/google-cloud-scheduler/google/cloud/scheduler/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_version.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_version.py +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py index cdeb2adfd75c..679a2babad17 100644 --- a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudSchedulerClient).get_transport_class, type(CloudSchedulerClient) - ) + get_transport_class = CloudSchedulerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/client.py index c333991745ab..4ab0ec007006 100644 --- a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/client.py +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/client.py @@ -688,7 +688,7 @@ def __init__( transport_init: Union[ Type[CloudSchedulerTransport], Callable[..., CloudSchedulerTransport] ] = ( - type(self).get_transport_class(transport) + CloudSchedulerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudSchedulerTransport], transport) ) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_version.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_version.py +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py index d2ae1c34de6d..76e71ea960f5 100644 --- a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudSchedulerClient).get_transport_class, type(CloudSchedulerClient) - ) + get_transport_class = CloudSchedulerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/client.py index 334e30bda2bf..29af8e0cb611 100644 --- a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/client.py +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/client.py @@ -688,7 +688,7 @@ def __init__( transport_init: Union[ Type[CloudSchedulerTransport], Callable[..., CloudSchedulerTransport] ] = ( - type(self).get_transport_class(transport) + CloudSchedulerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudSchedulerTransport], transport) ) diff --git a/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json b/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json index c51f9f077dc3..4ab9d3e9fee8 100644 --- a/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json +++ b/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-scheduler", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json b/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json index 79d9c58bf1d0..240bb08cb534 100644 --- a/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json +++ b/packages/google-cloud-scheduler/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-scheduler", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py index be568434f4a6..65b9dc46eb24 100644 --- a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py +++ b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py @@ -1290,22 +1290,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1858,22 +1859,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2236,22 +2238,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2620,22 +2623,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2989,22 +2993,23 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3354,22 +3359,23 @@ async def test_pause_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_job - ] = mock_object + ] = mock_rpc request = {} await client.pause_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3732,22 +3738,23 @@ async def test_resume_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_job - ] = mock_object + ] = mock_rpc request = {} await client.resume_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4110,22 +4117,23 @@ async def test_run_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_job - ] = mock_object + ] = mock_rpc request = {} await client.run_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py index 6c90881f466b..48944639efce 100644 --- a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py +++ b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py @@ -1292,22 +1292,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1863,22 +1864,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2246,22 +2248,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2635,22 +2638,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3006,22 +3010,23 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3374,22 +3379,23 @@ async def test_pause_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_job - ] = mock_object + ] = mock_rpc request = {} await client.pause_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3757,22 +3763,23 @@ async def test_resume_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_job - ] = mock_object + ] = mock_rpc request = {} await client.resume_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4140,22 +4147,23 @@ async def test_run_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_job - ] = mock_object + ] = mock_rpc request = {} await client.run_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager/gapic_version.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager/gapic_version.py index b45143cb91b0..558c8aab67c5 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager/gapic_version.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/gapic_version.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/gapic_version.py index b45143cb91b0..558c8aab67c5 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/gapic_version.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/async_client.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/async_client.py index 3972e99fc949..bdd8ef2744e6 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/async_client.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,10 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecretManagerServiceClient).get_transport_class, - type(SecretManagerServiceClient), - ) + get_transport_class = SecretManagerServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/client.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/client.py index 5f38b9696f08..53eded794550 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/client.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1/services/secret_manager_service/client.py @@ -713,7 +713,7 @@ def __init__( Type[SecretManagerServiceTransport], Callable[..., SecretManagerServiceTransport], ] = ( - type(self).get_transport_class(transport) + SecretManagerServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecretManagerServiceTransport], transport) ) diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/gapic_version.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/gapic_version.py index b45143cb91b0..558c8aab67c5 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/gapic_version.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/async_client.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/async_client.py index 168f67cf6c2a..395a32437652 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/async_client.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecretManagerServiceClient).get_transport_class, - type(SecretManagerServiceClient), - ) + get_transport_class = SecretManagerServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/client.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/client.py index 125d21f1cf96..b8fbf80b5e56 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/client.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta1/services/secret_manager_service/client.py @@ -694,7 +694,7 @@ def __init__( Type[SecretManagerServiceTransport], Callable[..., SecretManagerServiceTransport], ] = ( - type(self).get_transport_class(transport) + SecretManagerServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecretManagerServiceTransport], transport) ) diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/gapic_version.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/gapic_version.py index b45143cb91b0..558c8aab67c5 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/gapic_version.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/async_client.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/async_client.py index d026d237da63..6dc3a2588ee4 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/async_client.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
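The gapic_version.py hunks in this commit reset __version__ from the released value to the 0.0.0 placeholder, and the snippet-metadata JSON files below drop their version field to 0.1.0; both are markers that release automation is expected to stamp with the real version, per the {x-release-please-version} annotation. A rough sketch of that kind of substitution, assuming a simple regex rewrite rather than the actual release tooling:

# stamp_version_sketch.py -- hypothetical helper, not the real release automation.
import re

PLACEHOLDER = '__version__ = "0.0.0"  # {x-release-please-version}\n'


def stamp_version(contents: str, release: str) -> str:
    # Rewrite the quoted version on any line carrying the release marker.
    pattern = r'"(\d+\.\d+\.\d+)"(\s*#\s*\{x-release-please-version\})'
    return re.sub(pattern, '"' + release + r'"\2', contents)


print(stamp_version(PLACEHOLDER, "2.20.3"), end="")
# __version__ = "2.20.3"  # {x-release-please-version}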
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,10 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecretManagerServiceClient).get_transport_class, - type(SecretManagerServiceClient), - ) + get_transport_class = SecretManagerServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/client.py b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/client.py index 90a3b568c1cb..28e0eb565148 100644 --- a/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/client.py +++ b/packages/google-cloud-secret-manager/google/cloud/secretmanager_v1beta2/services/secret_manager_service/client.py @@ -713,7 +713,7 @@ def __init__( Type[SecretManagerServiceTransport], Callable[..., SecretManagerServiceTransport], ] = ( - type(self).get_transport_class(transport) + SecretManagerServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecretManagerServiceTransport], transport) ) diff --git a/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1.json b/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1.json index 636f8763ac4a..fb38dd2243a0 100644 --- a/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1.json +++ b/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-secret-manager", - "version": "2.20.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1beta2.json b/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1beta2.json index 1aaad62ec62f..9eff2f7a2910 100644 --- a/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1beta2.json +++ b/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secretmanager.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-secretmanager", - "version": "2.20.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secrets.v1beta1.json b/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secrets.v1beta1.json index 39fbfb9663d1..52f1d4e38bcf 100644 --- a/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secrets.v1beta1.json +++ b/packages/google-cloud-secret-manager/samples/generated_samples/snippet_metadata_google.cloud.secrets.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-secretmanager", - "version": "2.20.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py index 
665023a89311..9cba5034e241 100644 --- a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py +++ b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1/test_secret_manager_service.py @@ -1364,22 +1364,23 @@ async def test_list_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_secrets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1929,22 +1930,23 @@ async def test_create_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2332,22 +2334,23 @@ async def test_add_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.add_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2723,22 +2726,23 @@ async def test_get_secret_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3084,22 +3088,23 @@ async def test_update_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3451,22 +3456,23 @@ async def test_delete_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3825,22 +3831,23 @@ async def test_list_secret_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_secret_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_secret_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_secret_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4424,22 +4431,23 @@ async def test_get_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.get_secret_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4817,22 +4825,23 @@ async def test_access_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.access_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.access_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.access_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5215,22 +5224,23 @@ async def test_disable_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.disable_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.disable_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5619,22 +5629,23 @@ async def test_enable_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.enable_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enable_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6023,22 +6034,23 @@ async def test_destroy_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.destroy_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.destroy_secret_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.destroy_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6406,22 +6418,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6709,22 +6722,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7020,22 +7034,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py index 49e89ee4ca44..7fbbbd86d2c4 100644 --- a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py +++ b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta1/test_secret_manager_service.py @@ -1360,22 +1360,23 @@ async def test_list_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_secrets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1922,22 +1923,23 @@ async def test_create_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2317,22 +2319,23 @@ async def test_add_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.add_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2701,22 +2704,23 @@ async def test_get_secret_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3057,22 +3061,23 @@ async def test_update_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3420,22 +3425,23 @@ async def test_delete_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3792,22 +3798,23 @@ async def test_list_secret_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_secret_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_secret_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_secret_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4385,22 +4392,23 @@ async def test_get_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.get_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4774,22 +4782,23 @@ async def test_access_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.access_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.access_secret_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.access_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5164,22 +5173,23 @@ async def test_disable_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.disable_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.disable_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5556,22 +5566,23 @@ async def test_enable_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.enable_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enable_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5948,22 +5959,23 @@ async def test_destroy_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.destroy_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.destroy_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.destroy_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6327,22 +6339,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6630,22 +6643,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6941,22 +6955,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py index 2443ca2e5a07..aef4c6b36ae1 100644 --- a/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py +++ b/packages/google-cloud-secret-manager/tests/unit/gapic/secretmanager_v1beta2/test_secret_manager_service.py @@ -1364,22 +1364,23 @@ async def test_list_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_secrets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1929,22 +1930,23 @@ async def test_create_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2332,22 +2334,23 @@ async def test_add_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.add_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2723,22 +2726,23 @@ async def test_get_secret_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3084,22 +3088,23 @@ async def test_update_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3451,22 +3456,23 @@ async def test_delete_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3825,22 +3831,23 @@ async def test_list_secret_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_secret_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_secret_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_secret_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4424,22 +4431,23 @@ async def test_get_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.get_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4817,22 +4825,23 @@ async def test_access_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.access_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.access_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.access_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5215,22 +5224,23 @@ async def test_disable_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.disable_secret_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.disable_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5619,22 +5629,23 @@ async def test_enable_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.enable_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enable_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6023,22 +6034,23 @@ async def test_destroy_secret_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.destroy_secret_version - ] = mock_object + ] = mock_rpc request = {} await client.destroy_secret_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.destroy_secret_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6406,22 +6418,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6709,22 +6722,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7020,22 +7034,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 2921e20b0f8627d0aa23a9ee8f3df7c3940fecde Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:58:10 -0400 Subject: [PATCH 015/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#13002) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlY3VyZXNvdXJjZW1hbmFnZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlY3VyaXR5Y2VudGVyLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlY3VyaXR5Y2VudGVybWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtY29udHJvbC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtZGlyZWN0b3J5Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtdXNhZ2UvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2VoZWFsdGgvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNoZWxsLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNvdXJjZS1jb250ZXh0Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNwZWVjaC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXN0b3JhZ2UtY29udHJvbC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: 
eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXN0b3JhZ2UtdHJhbnNmZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXN0b3JhZ2VpbnNpZ2h0cy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXN1cHBvcnQvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRhbGVudC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRhc2tzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRlbGNvYXV0b21hdGlvbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRleHR0b3NwZWVjaC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRwdS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../securesourcemanager/gapic_version.py | 2 +- .../securesourcemanager_v1/gapic_version.py | 2 +- .../secure_source_manager/async_client.py | 6 +- .../services/secure_source_manager/client.py | 2 +- ...a_google.cloud.securesourcemanager.v1.json | 2 +- .../test_secure_source_manager.py | 139 +++-- .../cloud/securitycenter/gapic_version.py | 2 +- .../cloud/securitycenter_v1/gapic_version.py | 2 +- .../services/security_center/async_client.py | 5 +- .../services/security_center/client.py | 2 +- .../securitycenter_v1beta1/gapic_version.py | 2 +- .../services/security_center/async_client.py | 5 +- .../services/security_center/client.py | 2 +- .../securitycenter_v1p1beta1/gapic_version.py | 2 +- .../services/security_center/async_client.py | 5 +- .../services/security_center/client.py | 2 +- .../cloud/securitycenter_v2/gapic_version.py | 2 +- .../services/security_center/async_client.py | 5 +- .../services/security_center/client.py | 2 +- ...tadata_google.cloud.securitycenter.v1.json | 2 +- ...a_google.cloud.securitycenter.v1beta1.json | 2 +- ...google.cloud.securitycenter.v1p1beta1.json | 2 +- ...tadata_google.cloud.securitycenter.v2.json | 2 +- .../securitycenter_v1/test_security_center.py | 587 ++++++++++-------- .../test_security_center.py | 172 ++--- .../test_security_center.py | 217 ++++--- .../securitycenter_v2/test_security_center.py | 370 ++++++----- .../securitycentermanagement/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../security_center_management/client.py | 2 +- .../types/security_center_management.py | 10 +- ...gle.cloud.securitycentermanagement.v1.json | 2 +- .../test_security_center_management.py | 189 +++--- .../cloud/servicecontrol/gapic_version.py | 2 +- .../cloud/servicecontrol_v1/gapic_version.py | 2 +- .../services/quota_controller/async_client.py | 5 +- .../services/quota_controller/client.py | 2 +- .../service_controller/async_client.py | 5 +- .../services/service_controller/client.py | 2 +- .../types/service_controller.py | 2 +- .../cloud/servicecontrol_v2/gapic_version.py | 2 +- .../service_controller/async_client.py | 5 +- .../services/service_controller/client.py | 2 +- ...metadata_google.api.servicecontrol.v1.json | 2 +- ...metadata_google.api.servicecontrol.v2.json | 2 +- .../test_quota_controller.py | 9 +- 
.../test_service_controller.py | 18 +- .../test_service_controller.py | 18 +- .../cloud/servicedirectory/gapic_version.py | 2 +- .../servicedirectory_v1/gapic_version.py | 2 +- .../services/lookup_service/async_client.py | 5 +- .../services/lookup_service/client.py | 2 +- .../registration_service/async_client.py | 6 +- .../services/registration_service/client.py | 2 +- .../servicedirectory_v1beta1/gapic_version.py | 2 +- .../services/lookup_service/async_client.py | 5 +- .../services/lookup_service/client.py | 2 +- .../registration_service/async_client.py | 6 +- .../services/registration_service/client.py | 2 +- ...data_google.cloud.servicedirectory.v1.json | 2 +- ...google.cloud.servicedirectory.v1beta1.json | 2 +- .../test_lookup_service.py | 9 +- .../test_registration_service.py | 162 ++--- .../test_lookup_service.py | 9 +- .../test_registration_service.py | 162 ++--- .../cloud/servicemanagement/gapic_version.py | 2 +- .../servicemanagement_v1/gapic_version.py | 2 +- .../services/service_manager/async_client.py | 5 +- .../services/service_manager/client.py | 2 +- ...adata_google.api.servicemanagement.v1.json | 2 +- .../test_service_manager.py | 167 ++--- .../cloud/service_usage/gapic_version.py | 2 +- .../cloud/service_usage_v1/gapic_version.py | 2 +- .../services/service_usage/async_client.py | 5 +- .../services/service_usage/client.py | 2 +- ...t_metadata_google.api.serviceusage.v1.json | 2 +- .../service_usage_v1/test_service_usage.py | 84 +-- .../cloud/servicehealth/gapic_version.py | 2 +- .../cloud/servicehealth_v1/gapic_version.py | 2 +- .../services/service_health/async_client.py | 5 +- .../services/service_health/client.py | 2 +- ...etadata_google.cloud.servicehealth.v1.json | 2 +- .../servicehealth_v1/test_service_health.py | 54 +- .../google/cloud/shell/gapic_version.py | 2 +- .../google/cloud/shell_v1/gapic_version.py | 2 +- .../cloud_shell_service/async_client.py | 5 +- .../services/cloud_shell_service/client.py | 2 +- ...nippet_metadata_google.cloud.shell.v1.json | 2 +- .../shell_v1/test_cloud_shell_service.py | 85 +-- .../cloud/source_context/gapic_version.py | 2 +- .../cloud/source_context_v1/gapic_version.py | 2 +- .../google/cloud/speech/gapic_version.py | 2 +- .../google/cloud/speech_v1/gapic_version.py | 2 +- .../services/adaptation/async_client.py | 5 +- .../speech_v1/services/adaptation/client.py | 2 +- .../speech_v1/services/speech/async_client.py | 5 +- .../cloud/speech_v1/services/speech/client.py | 2 +- .../cloud/speech_v1p1beta1/gapic_version.py | 2 +- .../services/adaptation/async_client.py | 5 +- .../services/adaptation/client.py | 2 +- .../services/speech/async_client.py | 5 +- .../services/speech/client.py | 2 +- .../google/cloud/speech_v2/gapic_version.py | 2 +- .../speech_v2/services/speech/async_client.py | 5 +- .../cloud/speech_v2/services/speech/client.py | 2 +- ...ippet_metadata_google.cloud.speech.v1.json | 2 +- ...etadata_google.cloud.speech.v1p1beta1.json | 2 +- ...ippet_metadata_google.cloud.speech.v2.json | 2 +- .../unit/gapic/speech_v1/test_adaptation.py | 90 +-- .../tests/unit/gapic/speech_v1/test_speech.py | 37 +- .../gapic/speech_v1p1beta1/test_adaptation.py | 90 +-- .../gapic/speech_v1p1beta1/test_speech.py | 37 +- .../tests/unit/gapic/speech_v2/test_speech.py | 337 +++++----- .../cloud/storage_control/gapic_version.py | 2 +- .../cloud/storage_control_v2/gapic_version.py | 2 +- .../services/storage_control/async_client.py | 5 +- .../services/storage_control/client.py | 2 +- ...et_metadata_google.storage.control.v2.json | 2 +- 
.../test_storage_control.py | 100 +-- .../cloud/storageinsights/gapic_version.py | 2 +- .../cloud/storageinsights_v1/gapic_version.py | 2 +- .../services/storage_insights/async_client.py | 5 +- .../services/storage_insights/client.py | 2 +- ...adata_google.cloud.storageinsights.v1.json | 2 +- .../test_storage_insights.py | 63 +- .../google/cloud/support/gapic_version.py | 2 +- .../google/cloud/support_v2/gapic_version.py | 2 +- .../case_attachment_service/async_client.py | 6 +- .../case_attachment_service/client.py | 2 +- .../services/case_service/async_client.py | 5 +- .../services/case_service/client.py | 2 +- .../services/comment_service/async_client.py | 5 +- .../services/comment_service/client.py | 2 +- ...ppet_metadata_google.cloud.support.v2.json | 2 +- .../test_case_attachment_service.py | 9 +- .../gapic/support_v2/test_case_service.py | 72 ++- .../gapic/support_v2/test_comment_service.py | 18 +- .../google/cloud/talent/gapic_version.py | 2 +- .../google/cloud/talent_v4/gapic_version.py | 2 +- .../services/company_service/async_client.py | 5 +- .../services/company_service/client.py | 2 +- .../services/completion/async_client.py | 5 +- .../talent_v4/services/completion/client.py | 2 +- .../services/event_service/async_client.py | 5 +- .../services/event_service/client.py | 2 +- .../services/job_service/async_client.py | 5 +- .../talent_v4/services/job_service/client.py | 2 +- .../services/tenant_service/async_client.py | 5 +- .../services/tenant_service/client.py | 2 +- .../cloud/talent_v4beta1/gapic_version.py | 2 +- .../services/company_service/async_client.py | 5 +- .../services/company_service/client.py | 2 +- .../services/completion/async_client.py | 5 +- .../services/completion/client.py | 2 +- .../services/event_service/async_client.py | 5 +- .../services/event_service/client.py | 2 +- .../services/job_service/async_client.py | 5 +- .../services/job_service/client.py | 2 +- .../services/tenant_service/async_client.py | 5 +- .../services/tenant_service/client.py | 2 +- ...ippet_metadata_google.cloud.talent.v4.json | 2 +- ..._metadata_google.cloud.talent.v4beta1.json | 2 +- .../gapic/talent_v4/test_company_service.py | 45 +- .../unit/gapic/talent_v4/test_completion.py | 9 +- .../gapic/talent_v4/test_event_service.py | 9 +- .../unit/gapic/talent_v4/test_job_service.py | 120 ++-- .../gapic/talent_v4/test_tenant_service.py | 45 +- .../talent_v4beta1/test_company_service.py | 45 +- .../gapic/talent_v4beta1/test_completion.py | 9 +- .../talent_v4beta1/test_event_service.py | 9 +- .../gapic/talent_v4beta1/test_job_service.py | 110 ++-- .../talent_v4beta1/test_tenant_service.py | 45 +- .../google/cloud/tasks/gapic_version.py | 2 +- .../google/cloud/tasks_v2/gapic_version.py | 2 +- .../services/cloud_tasks/async_client.py | 5 +- .../tasks_v2/services/cloud_tasks/client.py | 2 +- .../cloud/tasks_v2beta2/gapic_version.py | 2 +- .../services/cloud_tasks/async_client.py | 5 +- .../services/cloud_tasks/client.py | 2 +- .../cloud/tasks_v2beta3/gapic_version.py | 2 +- .../services/cloud_tasks/async_client.py | 5 +- .../services/cloud_tasks/client.py | 2 +- ...nippet_metadata_google.cloud.tasks.v2.json | 2 +- ...t_metadata_google.cloud.tasks.v2beta2.json | 2 +- ...t_metadata_google.cloud.tasks.v2beta3.json | 2 +- .../unit/gapic/tasks_v2/test_cloud_tasks.py | 144 +++-- .../gapic/tasks_v2beta2/test_cloud_tasks.py | 189 +++--- .../gapic/tasks_v2beta3/test_cloud_tasks.py | 144 +++-- .../cloud/telcoautomation/gapic_version.py | 2 +- .../cloud/telcoautomation_v1/gapic_version.py | 2 +- 
.../services/telco_automation/async_client.py | 5 +- .../services/telco_automation/client.py | 2 +- .../telcoautomation_v1alpha1/gapic_version.py | 2 +- .../services/telco_automation/async_client.py | 5 +- .../services/telco_automation/client.py | 2 +- ...adata_google.cloud.telcoautomation.v1.json | 2 +- ...google.cloud.telcoautomation.v1alpha1.json | 2 +- .../test_telco_automation.py | 364 ++++++----- .../test_telco_automation.py | 364 ++++++----- .../google/cloud/tpu/gapic_version.py | 2 +- .../google/cloud/tpu_v1/gapic_version.py | 2 +- .../cloud/tpu_v1/services/tpu/async_client.py | 5 +- .../cloud/tpu_v1/services/tpu/client.py | 2 +- .../google/cloud/tpu_v2/gapic_version.py | 2 +- .../cloud/tpu_v2/services/tpu/async_client.py | 5 +- .../cloud/tpu_v2/services/tpu/client.py | 2 +- .../cloud/tpu_v2alpha1/gapic_version.py | 2 +- .../tpu_v2alpha1/services/tpu/async_client.py | 5 +- .../cloud/tpu_v2alpha1/services/tpu/client.py | 2 +- .../snippet_metadata_google.cloud.tpu.v1.json | 2 +- .../snippet_metadata_google.cloud.tpu.v2.json | 2 +- ...et_metadata_google.cloud.tpu.v2alpha1.json | 2 +- .../tests/unit/gapic/tpu_v1/test_tpu.py | 149 +++-- .../tests/unit/gapic/tpu_v2/test_tpu.py | 167 ++--- .../tests/unit/gapic/tpu_v2alpha1/test_tpu.py | 261 ++++---- 216 files changed, 3279 insertions(+), 2743 deletions(-) diff --git a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager/gapic_version.py b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager/gapic_version.py +++ b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/gapic_version.py b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/gapic_version.py +++ b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/async_client.py b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/async_client.py index cc7923266926..faf15a770d4b 100644 --- a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/async_client.py +++ b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -237,10 +236,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecureSourceManagerClient).get_transport_class, - type(SecureSourceManagerClient), - ) + get_transport_class = SecureSourceManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/client.py b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/client.py index 9736402743df..5a9a22aa6f79 100644 --- a/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/client.py +++ b/packages/google-cloud-securesourcemanager/google/cloud/securesourcemanager_v1/services/secure_source_manager/client.py @@ -791,7 +791,7 @@ def __init__( Type[SecureSourceManagerTransport], Callable[..., SecureSourceManagerTransport], ] = ( - type(self).get_transport_class(transport) + SecureSourceManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecureSourceManagerTransport], transport) ) diff --git a/packages/google-cloud-securesourcemanager/samples/generated_samples/snippet_metadata_google.cloud.securesourcemanager.v1.json b/packages/google-cloud-securesourcemanager/samples/generated_samples/snippet_metadata_google.cloud.securesourcemanager.v1.json index eb8b95723eee..d36601a453e4 100644 --- a/packages/google-cloud-securesourcemanager/samples/generated_samples/snippet_metadata_google.cloud.securesourcemanager.v1.json +++ b/packages/google-cloud-securesourcemanager/samples/generated_samples/snippet_metadata_google.cloud.securesourcemanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securesourcemanager", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py b/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py index 6460dd7dd18d..5d50c0db2b23 100644 --- a/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py +++ b/packages/google-cloud-securesourcemanager/tests/unit/gapic/securesourcemanager_v1/test_secure_source_manager.py @@ -1376,22 +1376,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1949,22 +1950,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2271,8 +2273,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2326,26 +2329,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2658,8 +2663,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -2713,26 +2719,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3093,22 +3101,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3682,22 +3691,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4009,8 +4019,9 @@ def test_create_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_repository(request) @@ -4066,26 +4077,28 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4414,8 +4427,9 @@ def test_delete_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_repository(request) @@ -4471,26 +4485,28 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4862,22 +4878,23 @@ async def test_get_iam_policy_repo_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy_repo - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy_repo(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy_repo(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5268,22 +5285,23 @@ async def test_set_iam_policy_repo_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy_repo - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy_repo(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy_repo(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5673,22 +5691,23 @@ async def test_test_iam_permissions_repo_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions_repo - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions_repo(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions_repo(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py index bcfde67a3bef..558c8aab67c5 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.34.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py index bcfde67a3bef..558c8aab67c5 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.34.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py index 5b8d3b8ba86a..1ef46d1b78c0 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -353,9 +352,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecurityCenterClient).get_transport_class, type(SecurityCenterClient) - ) + get_transport_class = SecurityCenterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py index a0692e19d518..54d86f3c22f0 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/services/security_center/client.py @@ -1151,7 +1151,7 @@ def __init__( transport_init: Union[ Type[SecurityCenterTransport], Callable[..., SecurityCenterTransport] ] = ( - type(self).get_transport_class(transport) + SecurityCenterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecurityCenterTransport], transport) ) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py index bcfde67a3bef..558c8aab67c5 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.34.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/async_client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/async_client.py index bc052e70a4b3..e0b55ad39ed7 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/async_client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -220,9 +219,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecurityCenterClient).get_transport_class, type(SecurityCenterClient) - ) + get_transport_class = SecurityCenterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/client.py index 732f40356728..1c8b08f218a7 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1beta1/services/security_center/client.py @@ -756,7 +756,7 @@ def __init__( transport_init: Union[ Type[SecurityCenterTransport], Callable[..., SecurityCenterTransport] ] = ( - type(self).get_transport_class(transport) + SecurityCenterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecurityCenterTransport], transport) ) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py index bcfde67a3bef..558c8aab67c5 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.34.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/async_client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/async_client.py index 41382503f1b4..70acba68fe13 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/async_client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -234,9 +233,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecurityCenterClient).get_transport_class, type(SecurityCenterClient) - ) + get_transport_class = SecurityCenterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/client.py index 6c4f7d37e4f7..07f082157aaf 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v1p1beta1/services/security_center/client.py @@ -799,7 +799,7 @@ def __init__( transport_init: Union[ Type[SecurityCenterTransport], Callable[..., SecurityCenterTransport] ] = ( - type(self).get_transport_class(transport) + SecurityCenterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecurityCenterTransport], transport) ) diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py index bcfde67a3bef..558c8aab67c5 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.34.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py index e992367c8155..f6214f930fce 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -297,9 +296,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecurityCenterClient).get_transport_class, type(SecurityCenterClient) - ) + get_transport_class = SecurityCenterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py index 56ccc9e58154..623d810126c0 100644 --- a/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py +++ b/packages/google-cloud-securitycenter/google/cloud/securitycenter_v2/services/security_center/client.py @@ -1012,7 +1012,7 @@ def __init__( transport_init: Union[ Type[SecurityCenterTransport], Callable[..., SecurityCenterTransport] ] = ( - type(self).get_transport_class(transport) + SecurityCenterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecurityCenterTransport], transport) ) diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json index 45137910452a..176395462f39 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "1.34.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json index f717ba7b0993..a4c1295eaf99 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "1.34.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json index ddee8465ac7f..774592ce11f7 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "1.34.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json index 
26e86997111a..3ebbbfe28b50 100644 --- a/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json +++ b/packages/google-cloud-securitycenter/samples/generated_samples/snippet_metadata_google.cloud.securitycenter.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycenter", - "version": "1.34.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py index 094cc1f0a802..b979ce6765ae 100644 --- a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py +++ b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1/test_security_center.py @@ -1338,8 +1338,9 @@ def test_bulk_mute_findings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.bulk_mute_findings(request) @@ -1395,26 +1396,28 @@ async def test_bulk_mute_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.bulk_mute_findings - ] = mock_object + ] = mock_rpc request = {} await client.bulk_mute_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.bulk_mute_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1818,22 +1821,23 @@ async def test_create_security_health_analytics_custom_module_async_use_cached_w ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.create_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2251,22 +2255,23 @@ async def test_create_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_source - ] = mock_object + ] = mock_rpc request = {} await client.create_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2672,22 +2677,23 @@ async def test_create_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_finding - ] = mock_object + ] = mock_rpc request = {} await client.create_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3110,22 +3116,23 @@ async def test_create_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.create_mute_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3539,22 +3546,23 @@ async def test_create_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.create_notification_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3953,22 +3961,23 @@ async def test_delete_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_mute_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4325,22 +4334,23 @@ async def test_delete_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4713,22 +4723,23 @@ async def test_delete_security_health_analytics_custom_module_async_use_cached_w ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.delete_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5089,22 +5100,23 @@ async def test_get_simulation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_simulation - ] = mock_object + ] = mock_rpc request = {} await client.get_simulation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_simulation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5486,22 +5498,23 @@ async def test_get_valued_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_valued_resource - ] = mock_object + ] = mock_rpc request = {} await client.get_valued_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_valued_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5901,22 +5914,23 @@ async def test_get_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.get_big_query_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6289,22 +6303,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6683,22 +6698,23 @@ async def test_get_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.get_mute_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7080,22 +7096,23 @@ async def test_get_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.get_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7477,22 +7494,23 @@ async def test_get_organization_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_organization_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_organization_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_organization_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7897,22 +7915,23 @@ async def test_get_effective_security_health_analytics_custom_module_async_use_c ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_effective_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_effective_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_effective_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8344,22 +8363,23 @@ async def test_get_security_health_analytics_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8746,22 +8766,23 @@ async def test_get_source_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_source - ] = mock_object + ] = mock_rpc request = {} await client.get_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9122,22 +9143,23 @@ async def test_group_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.group_assets - ] = mock_object + ] = mock_rpc request = {} await client.group_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.group_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9610,22 +9632,23 @@ async def test_group_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.group_findings - ] = mock_object + ] = mock_rpc request = {} await client.group_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.group_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10190,22 +10213,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10712,22 +10736,23 @@ async def test_list_descendant_security_health_analytics_custom_modules_async_us ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_descendant_security_health_analytics_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_descendant_security_health_analytics_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_descendant_security_health_analytics_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11353,22 +11378,23 @@ async def test_list_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_findings - ] = mock_object + ] = mock_rpc request = {} await client.list_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11854,22 +11880,23 @@ async def test_list_mute_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_mute_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_mute_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_mute_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12446,22 +12473,23 @@ async def test_list_notification_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notification_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_notification_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notification_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13068,22 +13096,23 @@ async def test_list_effective_security_health_analytics_custom_modules_async_use ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_effective_security_health_analytics_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_effective_security_health_analytics_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_effective_security_health_analytics_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13725,22 +13754,23 @@ async def test_list_security_health_analytics_custom_modules_async_use_cached_wr ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_security_health_analytics_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_security_health_analytics_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_security_health_analytics_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14334,22 +14364,23 @@ async def test_list_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14847,8 +14878,9 @@ def test_run_asset_discovery_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_asset_discovery(request) @@ -14904,26 +14936,28 @@ async def test_run_asset_discovery_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_asset_discovery - ] = mock_object + ] = mock_rpc request = {} await client.run_asset_discovery(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_asset_discovery(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15332,22 +15366,23 @@ async def test_set_finding_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_finding_state - ] = mock_object + ] = mock_rpc request = {} await client.set_finding_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_finding_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15791,22 +15826,23 @@ async def test_set_mute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_mute - ] = mock_object + ] = mock_rpc request = {} await client.set_mute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_mute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16192,22 +16228,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
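The reworded comment in the run_asset_discovery hunks notes that operation (long-running) methods call wrapper_fn to build a cached operations client on the first RPC call, which is why those tests reset wrapper_fn after the first call and then assert it is not invoked again. A small sketch of that lazy-caching idea follows; ToyLroTransport and build_operations_client are hypothetical names used only to illustrate the shape of the behavior, not the real transport API.

    from unittest import mock


    class ToyLroTransport:
        # Hypothetical transport: the operations client is built lazily on the
        # first LRO call and cached, so later calls reuse the same instance.
        def __init__(self, build_operations_client):
            self._build = build_operations_client
            self._operations_client = None

        @property
        def operations_client(self):
            if self._operations_client is None:  # built once, on first use
                self._operations_client = self._build()
            return self._operations_client


    builder = mock.Mock(side_effect=lambda: mock.Mock(name="operations_client"))
    transport = ToyLroTransport(builder)

    first = transport.operations_client   # builds and caches
    second = transport.operations_client  # reuses the cached instance
    assert first is second
    assert builder.call_count == 1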
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16584,22 +16621,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17017,22 +17055,23 @@ async def test_simulate_security_health_analytics_custom_module_async_use_cached ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.simulate_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.simulate_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.simulate_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17479,22 +17518,23 @@ async def test_update_external_system_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_external_system - ] = mock_object + ] = mock_rpc request = {} await client.update_external_system(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_external_system(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17912,22 +17952,23 @@ async def test_update_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_finding - ] = mock_object + ] = mock_rpc request = {} await client.update_finding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18324,22 +18365,23 @@ async def test_update_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.update_mute_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18737,22 +18779,23 @@ async def test_update_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.update_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19148,22 +19191,23 @@ async def test_update_organization_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_organization_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_organization_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_organization_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19577,22 +19621,23 @@ async def test_update_security_health_analytics_custom_module_async_use_cached_w ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.update_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20006,22 +20051,23 @@ async def test_update_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_source - ] = mock_object + ] = mock_rpc request = {} await client.update_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20385,22 +20431,23 @@ async def test_update_security_marks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_marks - ] = mock_object + ] = mock_rpc request = {} await client.update_security_marks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_marks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20792,22 +20839,23 @@ async def test_create_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.create_big_query_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21203,22 +21251,23 @@ async def test_delete_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.delete_big_query_export(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21593,22 +21642,23 @@ async def test_update_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.update_big_query_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22003,22 +22053,23 @@ async def test_list_big_query_exports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_big_query_exports - ] = mock_object + ] = mock_rpc request = {} await client.list_big_query_exports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_big_query_exports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22627,22 +22678,23 @@ async def test_create_event_threat_detection_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.create_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23068,22 +23120,23 @@ async def test_delete_event_threat_detection_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.delete_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23480,22 +23533,23 @@ async def test_get_event_threat_detection_custom_module_async_use_cached_wrapped ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23913,22 +23967,23 @@ async def test_list_descendant_event_threat_detection_custom_modules_async_use_c ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_descendant_event_threat_detection_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_descendant_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_descendant_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24566,22 +24621,23 @@ async def test_list_event_threat_detection_custom_modules_async_use_cached_wrapp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_event_threat_detection_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25207,22 +25263,23 @@ async def test_update_event_threat_detection_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.update_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25661,22 +25718,23 @@ async def test_validate_event_threat_detection_custom_module_async_use_cached_wr ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.validate_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -26003,22 +26061,23 @@ async def test_get_effective_event_threat_detection_custom_module_async_use_cach ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_effective_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_effective_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_effective_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -26448,22 +26507,23 @@ async def test_list_effective_event_threat_detection_custom_modules_async_use_ca ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_effective_event_threat_detection_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_effective_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_effective_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27090,22 +27150,23 @@ async def test_batch_create_resource_value_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_resource_value_configs - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_resource_value_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_resource_value_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27510,22 +27571,23 @@ async def test_delete_resource_value_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_resource_value_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_resource_value_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_resource_value_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27907,22 +27969,23 @@ async def test_get_resource_value_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_resource_value_config - ] = mock_object + ] = mock_rpc request = {} await client.get_resource_value_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_resource_value_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -28309,22 +28372,23 @@ async def test_list_resource_value_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_resource_value_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_resource_value_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_resource_value_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -28919,22 +28983,23 @@ async def test_update_resource_value_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_resource_value_config - ] = mock_object + ] = mock_rpc request = {} await client.update_resource_value_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_resource_value_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -29346,22 +29411,23 @@ async def test_list_valued_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_valued_resources - ] = mock_object + ] = mock_rpc request = {} await client.list_valued_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_valued_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -29939,22 +30005,23 @@ async def test_list_attack_paths_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attack_paths - ] = mock_object + ] = mock_rpc request = {} await client.list_attack_paths(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attack_paths(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py index da591f54d17e..b98981a4cf98 100644 --- a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py +++ b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1beta1/test_security_center.py @@ -1318,22 +1318,23 @@ async def test_create_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_source - ] = mock_object + ] = mock_rpc request = {} await client.create_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1710,22 +1711,23 @@ async def test_create_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_finding - ] = mock_object + ] = mock_rpc request = {} await client.create_finding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2104,22 +2106,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2499,22 +2502,23 @@ async def test_get_organization_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_organization_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_organization_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_organization_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2880,22 +2884,23 @@ async def test_get_source_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_source - ] = mock_object + ] = mock_rpc request = {} await client.get_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3251,22 +3256,23 @@ async def test_group_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.group_assets - ] = mock_object + ] = mock_rpc request = {} await client.group_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.group_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3734,22 +3740,23 @@ async def test_group_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.group_findings - ] = mock_object + ] = mock_rpc request = {} await client.group_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.group_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4312,22 +4319,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4806,22 +4814,23 @@ async def test_list_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_findings - ] = mock_object + ] = mock_rpc request = {} await client.list_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5287,22 +5296,23 @@ async def test_list_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5800,8 +5810,9 @@ def test_run_asset_discovery_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_asset_discovery(request) @@ -5857,26 +5868,28 @@ async def test_run_asset_discovery_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_asset_discovery - ] = mock_object + ] = mock_rpc request = {} await client.run_asset_discovery(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_asset_discovery(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6258,22 +6271,23 @@ async def test_set_finding_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_finding_state - ] = mock_object + ] = mock_rpc request = {} await client.set_finding_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_finding_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6662,22 +6676,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7054,22 +7069,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7469,22 +7485,23 @@ async def test_update_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_finding - ] = mock_object + ] = mock_rpc request = {} await client.update_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7852,22 +7869,23 @@ async def test_update_organization_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_organization_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_organization_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_organization_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8239,22 +8257,23 @@ async def test_update_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_source - ] = mock_object + ] = mock_rpc request = {} await client.update_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8613,22 +8632,23 @@ async def test_update_security_marks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_marks - ] = mock_object + ] = mock_rpc request = {} await client.update_security_marks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_marks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py index 814439ddc784..ef06a07fd3b9 100644 --- a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py +++ b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v1p1beta1/test_security_center.py @@ -1326,22 +1326,23 @@ async def test_create_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_source - ] = mock_object + ] = mock_rpc request = {} await client.create_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1726,22 +1727,23 @@ async def test_create_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_finding - ] = mock_object + ] = mock_rpc request = {} await client.create_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2151,22 +2153,23 @@ async def test_create_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.create_notification_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2571,22 +2574,23 @@ async def test_delete_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2940,22 +2944,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3346,22 +3351,23 @@ async def test_get_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.get_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3747,22 +3753,23 @@ async def test_get_organization_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_organization_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_organization_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_organization_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4131,22 +4138,23 @@ async def test_get_source_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_source - ] = mock_object + ] = mock_rpc request = {} await client.get_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4507,22 +4515,23 @@ async def test_group_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.group_assets - ] = mock_object + ] = mock_rpc request = {} await client.group_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.group_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4995,22 +5004,23 @@ async def test_group_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.group_findings - ] = mock_object + ] = mock_rpc request = {} await client.group_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.group_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5575,22 +5585,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6151,22 +6162,23 @@ async def test_list_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_findings - ] = mock_object + ] = mock_rpc request = {} await client.list_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6737,22 +6749,23 @@ async def test_list_notification_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notification_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_notification_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notification_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7322,22 +7335,23 @@ async def test_list_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7835,8 +7849,9 @@ def test_run_asset_discovery_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_asset_discovery(request) @@ -7892,26 +7907,28 @@ async def test_run_asset_discovery_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_asset_discovery - ] = mock_object + ] = mock_rpc request = {} await client.run_asset_discovery(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_asset_discovery(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8299,22 +8316,23 @@ async def test_set_finding_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_finding_state - ] = mock_object + ] = mock_rpc request = {} await client.set_finding_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_finding_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8707,22 +8725,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9099,22 +9118,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9520,22 +9540,23 @@ async def test_update_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_finding - ] = mock_object + ] = mock_rpc request = {} await client.update_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9929,22 +9950,23 @@ async def test_update_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.update_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10345,22 +10367,23 @@ async def test_update_organization_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_organization_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_organization_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_organization_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10735,22 +10758,23 @@ async def test_update_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_source - ] = mock_object + ] = mock_rpc request = {} await client.update_source(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11124,22 +11148,23 @@ async def test_update_security_marks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_marks - ] = mock_object + ] = mock_rpc request = {} await client.update_security_marks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_marks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py index d4ed25fa3cef..1fc0f9fdd21b 100644 --- a/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py +++ b/packages/google-cloud-securitycenter/tests/unit/gapic/securitycenter_v2/test_security_center.py @@ -1374,22 +1374,23 @@ async def test_batch_create_resource_value_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_resource_value_configs - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_resource_value_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_resource_value_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1744,8 +1745,9 @@ def test_bulk_mute_findings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.bulk_mute_findings(request) @@ -1801,26 +1803,28 @@ async def test_bulk_mute_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.bulk_mute_findings - ] = mock_object + ] = mock_rpc request = {} await client.bulk_mute_findings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.bulk_mute_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2207,22 +2211,23 @@ async def test_create_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.create_big_query_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2656,22 +2661,23 @@ async def test_create_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_finding - ] = mock_object + ] = mock_rpc request = {} await client.create_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3091,22 +3097,23 @@ async def test_create_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.create_mute_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3523,22 +3530,23 @@ async def test_create_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.create_notification_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3941,22 +3949,23 @@ async def test_create_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_source - ] = mock_object + ] = mock_rpc request = {} await client.create_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4324,22 +4333,23 @@ async def test_delete_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.delete_big_query_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4695,22 +4705,23 @@ async def test_delete_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_mute_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5074,22 +5085,23 @@ async def test_delete_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_notification_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5446,22 +5458,23 @@ async def test_delete_resource_value_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_resource_value_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_resource_value_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_resource_value_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5839,22 +5852,23 @@ async def test_get_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.get_big_query_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6227,22 +6241,23 @@ async def test_get_simulation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_simulation - ] = mock_object + ] = mock_rpc request = {} await client.get_simulation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_simulation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6624,22 +6639,23 @@ async def test_get_valued_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_valued_resource - ] = mock_object + ] = mock_rpc request = {} await client.get_valued_resource(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_valued_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7015,22 +7031,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7406,22 +7423,23 @@ async def test_get_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.get_mute_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7804,22 +7822,23 @@ async def test_get_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.get_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8216,22 +8235,23 @@ async def test_get_resource_value_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_resource_value_config - ] = mock_object + ] = mock_rpc request = {} await client.get_resource_value_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_resource_value_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8610,22 +8630,23 @@ async def test_get_source_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_source - ] = mock_object + ] = mock_rpc request = {} await client.get_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8986,22 +9007,23 @@ async def test_group_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.group_findings - ] = mock_object + ] = mock_rpc request = {} await client.group_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.group_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9571,22 +9593,23 @@ async def test_list_attack_paths_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attack_paths - ] = mock_object + ] = mock_rpc request = {} await client.list_attack_paths(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attack_paths(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10163,22 +10186,23 @@ async def test_list_big_query_exports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_big_query_exports - ] = mock_object + ] = mock_rpc request = {} await client.list_big_query_exports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_big_query_exports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10749,22 +10773,23 @@ async def test_list_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_findings - ] = mock_object + ] = mock_rpc request = {} await client.list_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11332,22 +11357,23 @@ async def test_list_mute_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_mute_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_mute_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_mute_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11926,22 +11952,23 @@ async def test_list_notification_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notification_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_notification_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notification_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12524,22 +12551,23 @@ async def test_list_resource_value_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_resource_value_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_resource_value_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_resource_value_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13109,22 +13137,23 @@ async def test_list_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13690,22 +13719,23 @@ async def test_list_valued_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_valued_resources - ] = mock_object + ] = mock_rpc request = {} await client.list_valued_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_valued_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14321,22 +14351,23 @@ async def test_set_finding_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_finding_state - ] = mock_object + ] = mock_rpc request = {} await client.set_finding_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_finding_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14733,22 +14764,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15153,22 +15185,23 @@ async def test_set_mute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_mute - ] = mock_object + ] = mock_rpc request = {} await client.set_mute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_mute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15563,22 +15596,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15991,22 +16025,23 @@ async def test_update_big_query_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_big_query_export - ] = mock_object + ] = mock_rpc request = {} await client.update_big_query_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_big_query_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16410,22 +16445,23 @@ async def test_update_external_system_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_external_system - ] = mock_object + ] = mock_rpc request = {} await client.update_external_system(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_external_system(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16843,22 +16879,23 @@ async def test_update_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_finding - ] = mock_object + ] = mock_rpc request = {} await client.update_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17262,22 +17299,23 @@ async def test_update_mute_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_mute_config - ] = mock_object + ] = mock_rpc request = {} await client.update_mute_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_mute_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17690,22 +17728,23 @@ async def test_update_notification_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_notification_config - ] = mock_object + ] = mock_rpc request = {} await client.update_notification_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_notification_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18116,22 +18155,23 @@ async def test_update_resource_value_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_resource_value_config - ] = mock_object + ] = mock_rpc request = {} await client.update_resource_value_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_resource_value_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18533,22 +18573,23 @@ async def test_update_security_marks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_marks - ] = mock_object + ] = mock_rpc request = {} await client.update_security_marks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_marks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18925,22 +18966,23 @@ async def test_update_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_source - ] = mock_object + ] = mock_rpc request = {} await client.update_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py index 7a4d810a47da..558c8aab67c5 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.14" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py index 7a4d810a47da..558c8aab67c5 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.14" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py index 6d30d7281175..5267fe600985 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -230,10 +229,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecurityCenterManagementClient).get_transport_class, - type(SecurityCenterManagementClient), - ) + get_transport_class = SecurityCenterManagementClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py index 5772693613e6..4aab550aff07 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py @@ -792,7 +792,7 @@ def __init__( Type[SecurityCenterManagementTransport], Callable[..., SecurityCenterManagementTransport], ] = ( - type(self).get_transport_class(transport) + SecurityCenterManagementClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecurityCenterManagementTransport], transport) ) diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py index 65be72ffe49b..49d0dff77c38 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py @@ -834,7 +834,7 @@ class UpdateSecurityHealthAnalyticsCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually update the - module could still fail because: + module could still fail because - The state could have changed (e.g. IAM permission lost) or - A failure occurred while trying to update the module. @@ -877,7 +877,7 @@ class DeleteSecurityHealthAnalyticsCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually delete the - module could still fail because: + module could still fail because - The state could have changed (e.g. 
IAM permission lost) or - A failure occurred while trying to delete the module. @@ -1715,7 +1715,7 @@ class CreateEventThreatDetectionCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually create the - module could still fail because: + module could still fail because - The state could have changed (e.g. IAM permission lost) or - A failure occurred during creation of the module. @@ -1758,7 +1758,7 @@ class UpdateEventThreatDetectionCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually update the - module could still fail because: + module could still fail because - The state could have changed (e.g. IAM permission lost) or - A failure occurred while trying to update the module. @@ -1801,7 +1801,7 @@ class DeleteEventThreatDetectionCustomModuleRequest(proto.Message): indicates the request is valid while an error response indicates the request is invalid. Note that a subsequent request to actually delete the - module could still fail because: + module could still fail because - The state could have changed (e.g. IAM permission lost) or - A failure occurred while trying to delete the module. diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json b/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json index 2a2523830784..7299c07cc773 100644 --- a/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycentermanagement", - "version": "0.1.14" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py b/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py index c05bd84d2673..35c8541f15be 100644 --- a/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py +++ b/packages/google-cloud-securitycentermanagement/tests/unit/gapic/securitycentermanagement_v1/test_security_center_management.py @@ -1402,22 +1402,23 @@ async def test_list_effective_security_health_analytics_custom_modules_async_use ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_effective_security_health_analytics_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_effective_security_health_analytics_custom_modules(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_effective_security_health_analytics_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2071,22 +2072,23 @@ async def test_get_effective_security_health_analytics_custom_module_async_use_c ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_effective_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_effective_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_effective_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2508,22 +2510,23 @@ async def test_list_security_health_analytics_custom_modules_async_use_cached_wr ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_security_health_analytics_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_security_health_analytics_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_security_health_analytics_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3154,22 +3157,23 @@ async def test_list_descendant_security_health_analytics_custom_modules_async_us ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_descendant_security_health_analytics_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_descendant_security_health_analytics_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_descendant_security_health_analytics_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3826,22 +3830,23 @@ async def test_get_security_health_analytics_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4270,22 +4275,23 @@ async def test_create_security_health_analytics_custom_module_async_use_cached_w ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.create_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4742,22 +4748,23 @@ async def test_update_security_health_analytics_custom_module_async_use_cached_w ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.update_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5193,22 +5200,23 @@ async def test_delete_security_health_analytics_custom_module_async_use_cached_w ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.delete_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5601,22 +5609,23 @@ async def test_simulate_security_health_analytics_custom_module_async_use_cached ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.simulate_security_health_analytics_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.simulate_security_health_analytics_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.simulate_security_health_analytics_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6072,22 +6081,23 @@ async def test_list_effective_event_threat_detection_custom_modules_async_use_ca ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_effective_event_threat_detection_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_effective_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_effective_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6744,22 +6754,23 @@ async def test_get_effective_event_threat_detection_custom_module_async_use_cach ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_effective_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_effective_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_effective_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7182,22 +7193,23 @@ async def test_list_event_threat_detection_custom_modules_async_use_cached_wrapp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_event_threat_detection_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7810,22 +7822,23 @@ async def test_list_descendant_event_threat_detection_custom_modules_async_use_c ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_descendant_event_threat_detection_custom_modules - ] = mock_object + ] = mock_rpc request = {} await client.list_descendant_event_threat_detection_custom_modules(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_descendant_event_threat_detection_custom_modules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8480,22 +8493,23 @@ async def test_get_event_threat_detection_custom_module_async_use_cached_wrapped ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.get_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8928,22 +8942,23 @@ async def test_create_event_threat_detection_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.create_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9395,22 +9410,23 @@ async def test_update_event_threat_detection_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.update_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9835,22 +9851,23 @@ async def test_delete_event_threat_detection_custom_module_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.delete_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10234,22 +10251,23 @@ async def test_validate_event_threat_detection_custom_module_async_use_cached_wr ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_event_threat_detection_custom_module - ] = mock_object + ] = mock_rpc request = {} await client.validate_event_threat_detection_custom_module(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_event_threat_detection_custom_module(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10556,22 +10574,23 @@ async def test_get_security_center_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_security_center_service - ] = mock_object + ] = mock_rpc request = {} await client.get_security_center_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_security_center_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10958,22 +10977,23 @@ async def test_list_security_center_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_security_center_services - ] = mock_object + ] = mock_rpc request = {} await client.list_security_center_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_security_center_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11576,22 +11596,23 @@ async def test_update_security_center_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_center_service - ] = mock_object + ] = mock_rpc request = {} await client.update_security_center_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_center_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol/gapic_version.py b/packages/google-cloud-service-control/google/cloud/servicecontrol/gapic_version.py index 44497894458e..558c8aab67c5 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol/gapic_version.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/gapic_version.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/gapic_version.py index 44497894458e..558c8aab67c5 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/gapic_version.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py index fef8489db0ee..9f77b731e70b 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(QuotaControllerClient).get_transport_class, type(QuotaControllerClient) - ) + get_transport_class = QuotaControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/client.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/client.py index e39722f43543..8a1c74c9a86d 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/client.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/quota_controller/client.py @@ -643,7 +643,7 @@ def __init__( transport_init: Union[ Type[QuotaControllerTransport], Callable[..., QuotaControllerTransport] ] = ( - type(self).get_transport_class(transport) + QuotaControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., QuotaControllerTransport], transport) ) diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/async_client.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/async_client.py index bc097d420d0b..1281c8c0c78a 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/async_client.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
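# The async_client hunks in this patch replace
# ``functools.partial(type(Client).get_transport_class, type(Client))`` with a plain
# ``Client.get_transport_class`` reference. A small sketch of why the two are
# interchangeable, assuming a toy metaclass modelled on the generated *ClientMeta
# pattern (ToyClient/ToyClientMeta are illustrative names only): both forms end up
# resolving the same transport registry, the new one without the extra indirection.
import functools


class ToyClientMeta(type):
    # In the generated clients the transport registry and the lookup live on the metaclass.
    _transport_registry = {"grpc": "GrpcTransport", "rest": "RestTransport"}

    def get_transport_class(cls, label=None):
        return cls._transport_registry[label or "grpc"]


class ToyClient(metaclass=ToyClientMeta):
    pass


# Old generated form: rebind the metaclass method through functools.partial.
old_style = functools.partial(type(ToyClient).get_transport_class, type(ToyClient))

# New generated form: attribute access on the class already yields a bound method.
new_style = ToyClient.get_transport_class

assert old_style("rest") == new_style("rest") == "RestTransport"
assert old_style() == new_style() == "GrpcTransport"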
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServiceControllerClient).get_transport_class, type(ServiceControllerClient) - ) + get_transport_class = ServiceControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/client.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/client.py index cc2d75b2f534..f99a8ec3700e 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/client.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/services/service_controller/client.py @@ -646,7 +646,7 @@ def __init__( Type[ServiceControllerTransport], Callable[..., ServiceControllerTransport], ] = ( - type(self).get_transport_class(transport) + ServiceControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceControllerTransport], transport) ) diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py index 93b6d50fbbd8..2ee129a72b45 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v1/types/service_controller.py @@ -137,7 +137,7 @@ class ConsumerInfo(proto.Message): project_number (int): The Google cloud project number, e.g. 1234567890. A value of 0 indicates no project - number is found. + number is found. NOTE: This field is deprecated after we support flexible consumer id. New code should not depend diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/gapic_version.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/gapic_version.py index 44497894458e..558c8aab67c5 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/gapic_version.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/async_client.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/async_client.py index c95c43ed562a..7772d9798ec8 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/async_client.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServiceControllerClient).get_transport_class, type(ServiceControllerClient) - ) + get_transport_class = ServiceControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/client.py b/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/client.py index aaadf70354c0..b41b9b7e2d71 100644 --- a/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/client.py +++ b/packages/google-cloud-service-control/google/cloud/servicecontrol_v2/services/service_controller/client.py @@ -653,7 +653,7 @@ def __init__( Type[ServiceControllerTransport], Callable[..., ServiceControllerTransport], ] = ( - type(self).get_transport_class(transport) + ServiceControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceControllerTransport], transport) ) diff --git a/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json b/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json index a04fd9ede122..ae2dfb3a75ea 100644 --- a/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json +++ b/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-control", - "version": "1.12.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json b/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json index 77e12dd48a68..2b78179c69f8 100644 --- a/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json +++ b/packages/google-cloud-service-control/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-control", - "version": "1.12.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py b/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py index 0961d911fdbd..53c5224aad8c 100644 --- a/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py +++ b/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py @@ -1316,22 +1316,23 @@ async def test_allocate_quota_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.allocate_quota - ] = mock_object + ] = mock_rpc request = {} await client.allocate_quota(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.allocate_quota(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_service_controller.py b/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_service_controller.py index 1b8672d72a00..1d3eb0079544 100644 --- a/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_service_controller.py +++ b/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v1/test_service_controller.py @@ -1341,22 +1341,23 @@ async def test_check_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check - ] = mock_object + ] = mock_rpc request = {} await client.check(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1630,22 +1631,23 @@ async def test_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report - ] = mock_object + ] = mock_rpc request = {} await client.report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v2/test_service_controller.py b/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v2/test_service_controller.py index cd2b356f790f..0d6a0f1c01d4 100644 --- a/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v2/test_service_controller.py +++ b/packages/google-cloud-service-control/tests/unit/gapic/servicecontrol_v2/test_service_controller.py @@ -1324,22 +1324,23 @@ async def test_check_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check - ] = mock_object + ] = mock_rpc request = {} await client.check(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1598,22 +1599,23 @@ async def test_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report - ] = mock_object + ] = mock_rpc request = {} await client.report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py index 43c9b3f7fc55..558c8aab67c5 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py index 43c9b3f7fc55..558c8aab67c5 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/async_client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/async_client.py index 8fc0e81df82c..293f339ce78a 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/async_client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LookupServiceClient).get_transport_class, type(LookupServiceClient) - ) + get_transport_class = LookupServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/client.py index ca4f38ec7be5..ca6b5946605a 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/lookup_service/client.py @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[LookupServiceTransport], Callable[..., LookupServiceTransport] ] = ( - type(self).get_transport_class(transport) + LookupServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LookupServiceTransport], transport) ) diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/async_client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/async_client.py index 8cfaf7d1da4e..8987356fda63 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/async_client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
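# The client.py hunks change ``type(self).get_transport_class(transport)`` to an explicit
# reference to the concrete client class when resolving the ``transport`` argument. A
# rough sketch of that constructor logic under stated assumptions (ToyTransport and
# ToyClient are invented for illustration and simplify away cast() and credentials):
# a string or None goes through get_transport_class, anything else is treated as a
# ready-made transport factory and called directly.
from typing import Callable, Optional, Type, Union


class ToyTransport:
    def __init__(self, host: str = "example.googleapis.com"):
        self.host = host


class ToyClient:
    _transport_registry = {"grpc": ToyTransport, "rest": ToyTransport}

    @classmethod
    def get_transport_class(cls, label: Optional[str] = None) -> Type[ToyTransport]:
        return cls._transport_registry[label or "grpc"]

    def __init__(
        self,
        transport: Optional[Union[str, Callable[..., ToyTransport]]] = None,
    ):
        # Mirrors the regenerated pattern: name the concrete client class rather
        # than type(self) when turning a transport label into a transport class.
        transport_init: Union[Type[ToyTransport], Callable[..., ToyTransport]] = (
            ToyClient.get_transport_class(transport)
            if isinstance(transport, str) or transport is None
            else transport
        )
        self._transport = transport_init(host="example.googleapis.com")


# Usage: a string picks a registered transport class; a callable is used as-is.
assert isinstance(ToyClient("rest")._transport, ToyTransport)
assert ToyClient(lambda host: ToyTransport(host))._transport.host == "example.googleapis.com"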
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,10 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RegistrationServiceClient).get_transport_class, - type(RegistrationServiceClient), - ) + get_transport_class = RegistrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/client.py index 4467b387d069..d7aeb08d0116 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1/services/registration_service/client.py @@ -761,7 +761,7 @@ def __init__( Type[RegistrationServiceTransport], Callable[..., RegistrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + RegistrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegistrationServiceTransport], transport) ) diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py index 43c9b3f7fc55..558c8aab67c5 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/async_client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/async_client.py index 437732b9d3b0..1e8154d34435 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/async_client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LookupServiceClient).get_transport_class, type(LookupServiceClient) - ) + get_transport_class = LookupServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/client.py index 375c81f33cca..330e1ba27005 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/lookup_service/client.py @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[LookupServiceTransport], Callable[..., LookupServiceTransport] ] = ( - type(self).get_transport_class(transport) + LookupServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LookupServiceTransport], transport) ) diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/async_client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/async_client.py index e4b97c18b466..2562bc7e6618 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/async_client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -221,10 +220,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RegistrationServiceClient).get_transport_class, - type(RegistrationServiceClient), - ) + get_transport_class = RegistrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/client.py b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/client.py index 48c64a6df2b6..6d4a57f86d79 100644 --- a/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/client.py +++ b/packages/google-cloud-service-directory/google/cloud/servicedirectory_v1beta1/services/registration_service/client.py @@ -763,7 +763,7 @@ def __init__( Type[RegistrationServiceTransport], Callable[..., RegistrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + RegistrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegistrationServiceTransport], transport) ) diff --git a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json index 6dd22304b45a..a0b20f7bef18 100644 --- a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json +++ b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-directory", - "version": "1.11.6" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json index 8edfd07ebe4b..8cb020ecf895 100644 --- a/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json +++ b/packages/google-cloud-service-directory/samples/generated_samples/snippet_metadata_google.cloud.servicedirectory.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-directory", - "version": "1.11.6" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_lookup_service.py b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_lookup_service.py index 34b8389d40ff..43f48b611c3f 100644 --- a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_lookup_service.py +++ b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_lookup_service.py @@ -1274,22 +1274,23 @@ async def test_resolve_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resolve_service - ] = mock_object + ] = mock_rpc request = {} await 
client.resolve_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resolve_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py index c422ea4b351b..1ca9cbbf2523 100644 --- a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py +++ b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1/test_registration_service.py @@ -1368,22 +1368,23 @@ async def test_create_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_namespace - ] = mock_object + ] = mock_rpc request = {} await client.create_namespace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1761,22 +1762,23 @@ async def test_list_namespaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_namespaces - ] = mock_object + ] = mock_rpc request = {} await client.list_namespaces(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_namespaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2323,22 +2325,23 @@ async def test_get_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_namespace - ] = mock_object + ] = mock_rpc request = {} await client.get_namespace(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2687,22 +2690,23 @@ async def test_update_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_namespace - ] = mock_object + ] = mock_rpc request = {} await client.update_namespace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3059,22 +3063,23 @@ async def test_delete_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_namespace - ] = mock_object + ] = mock_rpc request = {} await client.delete_namespace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3420,22 +3425,23 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3809,22 +3815,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4371,22 +4378,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4732,22 +4740,23 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5098,22 +5107,23 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5468,22 +5478,23 @@ async def test_create_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.create_endpoint(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5867,22 +5878,23 @@ async def test_list_endpoints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_endpoints - ] = mock_object + ] = mock_rpc request = {} await client.list_endpoints(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_endpoints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6438,22 +6450,23 @@ async def test_get_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.get_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6815,22 +6828,23 @@ async def test_update_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.update_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7191,22 +7205,23 @@ async def test_delete_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.delete_endpoint(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7550,22 +7565,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7852,22 +7868,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8164,22 +8181,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_lookup_service.py b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_lookup_service.py index 7070f2d1f02b..073b0b00632f 100644 --- a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_lookup_service.py +++ b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_lookup_service.py @@ -1274,22 +1274,23 @@ async def test_resolve_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resolve_service - ] = mock_object + ] = mock_rpc request = {} await client.resolve_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resolve_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py index be0b0df76b05..d452d3735365 100644 --- a/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py +++ b/packages/google-cloud-service-directory/tests/unit/gapic/servicedirectory_v1beta1/test_registration_service.py @@ -1369,22 +1369,23 @@ async def test_create_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_namespace - ] = mock_object + ] = mock_rpc request = {} await client.create_namespace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1762,22 +1763,23 @@ async def test_list_namespaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_namespaces - ] = mock_object + ] = mock_rpc request = {} await client.list_namespaces(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_namespaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2324,22 +2326,23 @@ async def test_get_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_namespace - ] = mock_object + ] = mock_rpc request = {} await client.get_namespace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2688,22 +2691,23 @@ async def test_update_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_namespace - ] = mock_object + ] = mock_rpc request = {} await client.update_namespace(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3060,22 +3064,23 @@ async def test_delete_namespace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_namespace - ] = mock_object + ] = mock_rpc request = {} await client.delete_namespace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_namespace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3421,22 +3426,23 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3810,22 +3816,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4372,22 +4379,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4733,22 +4741,23 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5099,22 +5108,23 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5469,22 +5479,23 @@ async def test_create_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.create_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5868,22 +5879,23 @@ async def test_list_endpoints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_endpoints - ] = mock_object + ] = mock_rpc request = {} await client.list_endpoints(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_endpoints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6439,22 +6451,23 @@ async def test_get_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.get_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6816,22 +6829,23 @@ async def test_update_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.update_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7192,22 +7206,23 @@ async def test_delete_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.delete_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7551,22 +7566,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7853,22 +7869,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8165,22 +8182,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py b/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py index ab4d576b9121..558c8aab67c5 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py index ab4d576b9121..558c8aab67c5 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.8.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/async_client.py b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/async_client.py index 114749620a07..22cb0607669c 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/async_client.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,9 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServiceManagerClient).get_transport_class, type(ServiceManagerClient) - ) + get_transport_class = ServiceManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/client.py b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/client.py index 803f72a413c9..69042f000e65 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/client.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/services/service_manager/client.py @@ -671,7 +671,7 @@ def __init__( transport_init: Union[ Type[ServiceManagerTransport], Callable[..., ServiceManagerTransport] ] = ( - type(self).get_transport_class(transport) + ServiceManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceManagerTransport], transport) ) diff --git a/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json b/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json index 55c9c360f966..8b514f320127 100644 --- a/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json +++ b/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-management", - "version": "1.8.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py index 6870c95c54ea..12b5f383294f 100644 --- a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py +++ b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py @@ -1333,22 +1333,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub 
method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1840,22 +1841,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2146,8 +2148,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -2201,26 +2204,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2449,8 +2454,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -2504,26 +2510,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2815,8 +2823,9 @@ def test_undelete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_service(request) @@ -2870,26 +2879,28 @@ async def test_undelete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_service - ] = mock_object + ] = mock_rpc request = {} await client.undelete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3249,22 +3260,23 @@ async def test_list_service_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_service_configs(request) # Establish that the underlying gRPC stub method was called. 
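The rewritten comments in the operation-returning tests (create_service, delete_service, undelete_service, submit_config_source, and so on) spell out what was previously implicit: on the first RPC call, wrapper_fn builds a cached client._transport.operations_client instance, and later calls reuse it, which is why the tests reset wrapper_fn and then assert wrapper_fn.call_count == 0 after the second call. A rough sketch of that build-once-and-reuse shape follows; FakeTransport and build_operations_client are hypothetical names, not the real GAPIC internals.

    from unittest import mock


    class FakeTransport:
        """Toy transport that lazily builds an operations client once."""

        def __init__(self, build_operations_client):
            self._build_operations_client = build_operations_client
            self._operations_client = None

        @property
        def operations_client(self):
            # Built on the first RPC that needs it, then cached for later calls.
            if self._operations_client is None:
                self._operations_client = self._build_operations_client()
            return self._operations_client


    build_operations_client = mock.Mock(return_value=object())
    transport = FakeTransport(build_operations_client)

    first = transport.operations_client   # first access triggers the build
    second = transport.operations_client  # second access reuses the cached instance

    assert first is second
    assert build_operations_client.call_count == 1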
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3849,22 +3861,23 @@ async def test_get_service_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_config - ] = mock_object + ] = mock_rpc request = {} await client.get_service_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4269,22 +4282,23 @@ async def test_create_service_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_config - ] = mock_object + ] = mock_rpc request = {} await client.create_service_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4610,8 +4624,9 @@ def test_submit_config_source_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.submit_config_source(request) @@ -4667,26 +4682,28 @@ async def test_submit_config_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_config_source - ] = mock_object + ] = mock_rpc request = {} await client.submit_config_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.submit_config_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5080,22 +5097,23 @@ async def test_list_service_rollouts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_rollouts - ] = mock_object + ] = mock_rpc request = {} await client.list_service_rollouts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5690,22 +5708,23 @@ async def test_get_service_rollout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_rollout - ] = mock_object + ] = mock_rpc request = {} await client.get_service_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6034,8 +6053,9 @@ def test_create_service_rollout_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service_rollout(request) @@ -6091,26 +6111,28 @@ async def test_create_service_rollout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_rollout - ] = mock_object + ] = mock_rpc request = {} await client.create_service_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6489,22 +6511,23 @@ async def test_generate_config_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_config_report - ] = mock_object + ] = mock_rpc request = {} await client.generate_config_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_config_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-service-usage/google/cloud/service_usage/gapic_version.py b/packages/google-cloud-service-usage/google/cloud/service_usage/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-service-usage/google/cloud/service_usage/gapic_version.py +++ b/packages/google-cloud-service-usage/google/cloud/service_usage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-usage/google/cloud/service_usage_v1/gapic_version.py b/packages/google-cloud-service-usage/google/cloud/service_usage_v1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-service-usage/google/cloud/service_usage_v1/gapic_version.py +++ b/packages/google-cloud-service-usage/google/cloud/service_usage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/async_client.py b/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/async_client.py index 55fefd19258b..94fc5f807598 100644 --- a/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/async_client.py +++ b/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServiceUsageClient).get_transport_class, type(ServiceUsageClient) - ) + get_transport_class = ServiceUsageClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/client.py b/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/client.py index 9a2dcdbed26f..c40790bffda8 100644 --- a/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/client.py +++ b/packages/google-cloud-service-usage/google/cloud/service_usage_v1/services/service_usage/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[ServiceUsageTransport], Callable[..., ServiceUsageTransport] ] = ( - type(self).get_transport_class(transport) + ServiceUsageClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceUsageTransport], transport) ) diff --git a/packages/google-cloud-service-usage/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json b/packages/google-cloud-service-usage/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json index 3f449d7bdccc..4ff459eb433d 100644 --- a/packages/google-cloud-service-usage/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json +++ b/packages/google-cloud-service-usage/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-usage", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py b/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py index 42df79f733d1..c205b68a64e4 100644 --- a/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py +++ b/packages/google-cloud-service-usage/tests/unit/gapic/service_usage_v1/test_service_usage.py @@ -1216,8 +1216,9 @@ def test_enable_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_service(request) @@ -1271,26 +1272,28 @@ async def test_enable_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_service - ] = mock_object + ] = mock_rpc request = {} await client.enable_service(request) # Establish that the underlying gRPC stub method was called. 
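The async clients touched by this patch (ServiceManager, ServiceUsage, ServiceHealth, CloudShellService, and the Speech/Adaptation clients further below) drop the functools import and replace get_transport_class = functools.partial(type(Client).get_transport_class, type(Client)) with a direct reference to the sync client's attribute; the sync clients likewise call Client.get_transport_class(transport) instead of type(self).get_transport_class(transport). The two spellings are equivalent when get_transport_class lives on the client's metaclass and resolves the same transport registry, as the sketch below illustrates with hypothetical DemoMeta/DemoClient names standing in for the generated ClientMeta/*Client classes.

    import functools
    from collections import OrderedDict


    class DemoMeta(type):
        # Hypothetical stand-in for the generated ClientMeta metaclass.
        _transport_registry = OrderedDict(
            grpc="GrpcTransport", grpc_asyncio="GrpcAsyncIOTransport"
        )

        def get_transport_class(cls, label=None):
            if label:
                return cls._transport_registry[label]
            return next(iter(cls._transport_registry.values()))


    class DemoClient(metaclass=DemoMeta):
        pass


    # Pattern removed by the patch: a partial over the metaclass method.
    old_style = functools.partial(
        type(DemoClient).get_transport_class, type(DemoClient)
    )

    # Pattern introduced by the patch: the same method looked up through the
    # class, which arrives already bound.
    new_style = DemoClient.get_transport_class

    assert old_style("grpc_asyncio") == new_style("grpc_asyncio") == "GrpcAsyncIOTransport"

Both spellings end up reading the same registry defined on the metaclass, so the simplification changes no behavior while removing the functools dependency.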
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1498,8 +1501,9 @@ def test_disable_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_service(request) @@ -1553,26 +1557,28 @@ async def test_disable_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_service - ] = mock_object + ] = mock_rpc request = {} await client.disable_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1842,22 +1848,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2130,22 +2137,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2561,8 +2569,9 @@ def test_batch_enable_services_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_enable_services(request) @@ -2618,26 +2627,28 @@ async def test_batch_enable_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_enable_services - ] = mock_object + ] = mock_rpc request = {} await client.batch_enable_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_enable_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2915,22 +2926,23 @@ async def test_batch_get_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_get_services - ] = mock_object + ] = mock_rpc request = {} await client.batch_get_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth/gapic_version.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth/gapic_version.py index 51d2795b9d6b..558c8aab67c5 100644 --- a/packages/google-cloud-servicehealth/google/cloud/servicehealth/gapic_version.py +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_version.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_version.py index 51d2795b9d6b..558c8aab67c5 100644 --- a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_version.py +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/async_client.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/async_client.py index aa3fa2e4d9df..3c1252e7433c 100644 --- a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/async_client.py +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServiceHealthClient).get_transport_class, type(ServiceHealthClient) - ) + get_transport_class = ServiceHealthClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/client.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/client.py index 23baa4ebf749..557ca9b981ee 100644 --- a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/client.py +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/client.py @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[ServiceHealthTransport], Callable[..., ServiceHealthTransport] ] = ( - type(self).get_transport_class(transport) + ServiceHealthClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceHealthTransport], transport) ) diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/snippet_metadata_google.cloud.servicehealth.v1.json b/packages/google-cloud-servicehealth/samples/generated_samples/snippet_metadata_google.cloud.servicehealth.v1.json index 0fa1d3d8d8f4..22600c1e9d56 100644 --- a/packages/google-cloud-servicehealth/samples/generated_samples/snippet_metadata_google.cloud.servicehealth.v1.json +++ b/packages/google-cloud-servicehealth/samples/generated_samples/snippet_metadata_google.cloud.servicehealth.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-servicehealth", - "version": "0.1.6" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py index 23d69759f323..48e1de387887 100644 --- a/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py +++ 
b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py @@ -1286,22 +1286,23 @@ async def test_list_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_events - ] = mock_object + ] = mock_rpc request = {} await client.list_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1871,22 +1872,23 @@ async def test_get_event_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event - ] = mock_object + ] = mock_rpc request = {} await client.get_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2274,22 +2276,23 @@ async def test_list_organization_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_organization_events - ] = mock_object + ] = mock_rpc request = {} await client.list_organization_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_organization_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2895,22 +2898,23 @@ async def test_get_organization_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_organization_event - ] = mock_object + ] = mock_rpc request = {} await client.get_organization_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_organization_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3310,22 +3314,23 @@ async def test_list_organization_impacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_organization_impacts - ] = mock_object + ] = mock_rpc request = {} await client.list_organization_impacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_organization_impacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3907,22 +3912,23 @@ async def test_get_organization_impact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_organization_impact - ] = mock_object + ] = mock_rpc request = {} await client.get_organization_impact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_organization_impact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-shell/google/cloud/shell/gapic_version.py b/packages/google-cloud-shell/google/cloud/shell/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-shell/google/cloud/shell/gapic_version.py +++ b/packages/google-cloud-shell/google/cloud/shell/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-shell/google/cloud/shell_v1/gapic_version.py b/packages/google-cloud-shell/google/cloud/shell_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-shell/google/cloud/shell_v1/gapic_version.py +++ b/packages/google-cloud-shell/google/cloud/shell_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/async_client.py b/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/async_client.py index ca3e64780036..be5655ec1677 100644 --- a/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/async_client.py +++ b/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudShellServiceClient).get_transport_class, type(CloudShellServiceClient) - ) + get_transport_class = CloudShellServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/client.py b/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/client.py index 8a503929335f..ab1057fa6514 100644 --- a/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/client.py +++ b/packages/google-cloud-shell/google/cloud/shell_v1/services/cloud_shell_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[CloudShellServiceTransport], Callable[..., CloudShellServiceTransport], ] = ( - type(self).get_transport_class(transport) + CloudShellServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudShellServiceTransport], transport) ) diff --git a/packages/google-cloud-shell/samples/generated_samples/snippet_metadata_google.cloud.shell.v1.json b/packages/google-cloud-shell/samples/generated_samples/snippet_metadata_google.cloud.shell.v1.json index f55c29fcf97c..3389a15fe5da 100644 --- a/packages/google-cloud-shell/samples/generated_samples/snippet_metadata_google.cloud.shell.v1.json +++ b/packages/google-cloud-shell/samples/generated_samples/snippet_metadata_google.cloud.shell.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-shell", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-shell/tests/unit/gapic/shell_v1/test_cloud_shell_service.py b/packages/google-cloud-shell/tests/unit/gapic/shell_v1/test_cloud_shell_service.py index 47c7ca2dac08..ac7dd1c005aa 100644 --- a/packages/google-cloud-shell/tests/unit/gapic/shell_v1/test_cloud_shell_service.py +++ b/packages/google-cloud-shell/tests/unit/gapic/shell_v1/test_cloud_shell_service.py @@ -1353,22 +1353,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1687,8 +1688,9 @@ def test_start_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_environment(request) @@ -1744,26 +1746,28 @@ async def test_start_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_environment - ] = mock_object + ] = mock_rpc request = {} await client.start_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1992,8 +1996,9 @@ def test_authorize_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.authorize_environment(request) @@ -2049,26 +2054,28 @@ async def test_authorize_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.authorize_environment - ] = mock_object + ] = mock_rpc request = {} await client.authorize_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.authorize_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2284,8 +2291,9 @@ def test_add_public_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_public_key(request) @@ -2339,26 +2347,28 @@ async def test_add_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_public_key - ] = mock_object + ] = mock_rpc request = {} await client.add_public_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2576,8 +2586,9 @@ def test_remove_public_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_public_key(request) @@ -2633,26 +2644,28 @@ async def test_remove_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_public_key - ] = mock_object + ] = mock_rpc request = {} await client.remove_public_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-source-context/google/cloud/source_context/gapic_version.py b/packages/google-cloud-source-context/google/cloud/source_context/gapic_version.py index ede38a02b828..558c8aab67c5 100644 --- a/packages/google-cloud-source-context/google/cloud/source_context/gapic_version.py +++ b/packages/google-cloud-source-context/google/cloud/source_context/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.5.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-source-context/google/cloud/source_context_v1/gapic_version.py b/packages/google-cloud-source-context/google/cloud/source_context_v1/gapic_version.py index ede38a02b828..558c8aab67c5 100644 --- a/packages/google-cloud-source-context/google/cloud/source_context_v1/gapic_version.py +++ b/packages/google-cloud-source-context/google/cloud/source_context_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.5.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-speech/google/cloud/speech/gapic_version.py b/packages/google-cloud-speech/google/cloud/speech/gapic_version.py index fe3f3ebe2e1f..558c8aab67c5 100644 --- a/packages/google-cloud-speech/google/cloud/speech/gapic_version.py +++ b/packages/google-cloud-speech/google/cloud/speech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-speech/google/cloud/speech_v1/gapic_version.py b/packages/google-cloud-speech/google/cloud/speech_v1/gapic_version.py index fe3f3ebe2e1f..558c8aab67c5 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1/gapic_version.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/async_client.py b/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/async_client.py index 6cd5886e3d13..7abd73958369 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/async_client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AdaptationClient).get_transport_class, type(AdaptationClient) - ) + get_transport_class = AdaptationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/client.py b/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/client.py index 18e477186b7f..ef0560f0e2c6 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1/services/adaptation/client.py @@ -680,7 +680,7 @@ def __init__( transport_init: Union[ Type[AdaptationTransport], Callable[..., AdaptationTransport] ] = ( - type(self).get_transport_class(transport) + AdaptationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AdaptationTransport], transport) ) diff --git a/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/async_client.py b/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/async_client.py index 240b84969d8e..9e8f1d32fa04 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/async_client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpeechClient).get_transport_class, type(SpeechClient) - ) + get_transport_class = SpeechClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/client.py b/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/client.py index e122288df1e6..df94e68968c7 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1/services/speech/client.py @@ -684,7 +684,7 @@ def __init__( transport_init: Union[ Type[SpeechTransport], Callable[..., SpeechTransport] ] = ( - type(self).get_transport_class(transport) + SpeechClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpeechTransport], transport) ) diff --git a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/gapic_version.py b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/gapic_version.py index fe3f3ebe2e1f..558c8aab67c5 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.27.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/async_client.py b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/async_client.py index 844335c11659..20346d501ddf 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/async_client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AdaptationClient).get_transport_class, type(AdaptationClient) - ) + get_transport_class = AdaptationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/client.py b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/client.py index 0f1041e07509..2b1401613cc7 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/adaptation/client.py @@ -680,7 +680,7 @@ def __init__( transport_init: Union[ Type[AdaptationTransport], Callable[..., AdaptationTransport] ] = ( - type(self).get_transport_class(transport) + AdaptationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AdaptationTransport], transport) ) diff --git a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/async_client.py b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/async_client.py index 6a8bec5fdcf8..241071396c8a 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/async_client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpeechClient).get_transport_class, type(SpeechClient) - ) + get_transport_class = SpeechClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/client.py b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/client.py index 15c32ff97cb0..c69d5e732bbc 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v1p1beta1/services/speech/client.py @@ -684,7 +684,7 @@ def __init__( transport_init: Union[ Type[SpeechTransport], Callable[..., SpeechTransport] ] = ( - type(self).get_transport_class(transport) + SpeechClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpeechTransport], transport) ) diff --git a/packages/google-cloud-speech/google/cloud/speech_v2/gapic_version.py b/packages/google-cloud-speech/google/cloud/speech_v2/gapic_version.py index fe3f3ebe2e1f..558c8aab67c5 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v2/gapic_version.py +++ b/packages/google-cloud-speech/google/cloud/speech_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/async_client.py b/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/async_client.py index b5f3526a0cf8..c017de6aad8a 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/async_client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpeechClient).get_transport_class, type(SpeechClient) - ) + get_transport_class = SpeechClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/client.py b/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/client.py index acea709a9ade..99d29cce9198 100644 --- a/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/client.py +++ b/packages/google-cloud-speech/google/cloud/speech_v2/services/speech/client.py @@ -780,7 +780,7 @@ def __init__( transport_init: Union[ Type[SpeechTransport], Callable[..., SpeechTransport] ] = ( - type(self).get_transport_class(transport) + SpeechClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpeechTransport], transport) ) diff --git a/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1.json b/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1.json index 5a9168ba272c..c793efe0e3c8 100644 --- a/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1.json +++ b/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-speech", - "version": "2.27.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1p1beta1.json b/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1p1beta1.json index 70f5c8ca37d0..b51d437c94b0 100644 --- a/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1p1beta1.json +++ b/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-speech", - "version": "2.27.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v2.json b/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v2.json index 05a7586b71f9..73ac2fc19d50 100644 --- a/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v2.json +++ b/packages/google-cloud-speech/samples/generated_samples/snippet_metadata_google.cloud.speech.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-speech", - "version": "2.27.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py index be9b24595c72..0acf49779b05 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_adaptation.py @@ -1257,22 +1257,23 @@ async def test_create_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ 
client._client._transport.create_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.create_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1653,22 +1654,23 @@ async def test_get_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.get_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2018,22 +2020,23 @@ async def test_list_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.list_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2586,22 +2589,23 @@ async def test_update_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.update_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2972,22 +2976,23 @@ async def test_delete_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_phrase_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3355,22 +3360,23 @@ async def test_create_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3757,22 +3763,23 @@ async def test_get_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4138,22 +4145,23 @@ async def test_list_custom_classes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_classes - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_classes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_classes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4726,22 +4734,23 @@ async def test_update_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_class(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5118,22 +5127,23 @@ async def test_delete_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.delete_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_speech.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_speech.py index d546fe02e069..e51b24dd4820 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_speech.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v1/test_speech.py @@ -1219,22 +1219,23 @@ async def test_recognize_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recognize - ] = mock_object + ] = mock_rpc request = {} await client.recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1495,8 +1496,9 @@ def test_long_running_recognize_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.long_running_recognize(request) @@ -1552,26 +1554,28 @@ async def test_long_running_recognize_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.long_running_recognize - ] = mock_object + ] = mock_rpc request = {} await client.long_running_recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.long_running_recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1819,22 +1823,23 @@ async def test_streaming_recognize_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_recognize - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py index 70b5a6941da8..0c47ae49bccc 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_adaptation.py @@ -1257,22 +1257,23 @@ async def test_create_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.create_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1653,22 +1654,23 @@ async def test_get_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.get_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2018,22 +2020,23 @@ async def test_list_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.list_phrase_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2586,22 +2589,23 @@ async def test_update_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.update_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2972,22 +2976,23 @@ async def test_delete_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3355,22 +3360,23 @@ async def test_create_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3757,22 +3763,23 @@ async def test_get_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_class(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4138,22 +4145,23 @@ async def test_list_custom_classes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_classes - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_classes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_classes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4726,22 +4734,23 @@ async def test_update_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5118,22 +5127,23 @@ async def test_delete_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.delete_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_speech.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_speech.py index 54c9c75be47b..284f37d35d65 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_speech.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v1p1beta1/test_speech.py @@ -1219,22 +1219,23 @@ async def test_recognize_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recognize - ] = mock_object + ] = mock_rpc request = {} await client.recognize(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1495,8 +1496,9 @@ def test_long_running_recognize_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.long_running_recognize(request) @@ -1552,26 +1554,28 @@ async def test_long_running_recognize_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.long_running_recognize - ] = mock_object + ] = mock_rpc request = {} await client.long_running_recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.long_running_recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1819,22 +1823,23 @@ async def test_streaming_recognize_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_recognize - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py b/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py index 0ac34ed676be..f4c129e9b8a2 100644 --- a/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py +++ b/packages/google-cloud-speech/tests/unit/gapic/speech_v2/test_speech.py @@ -1181,8 +1181,9 @@ def test_create_recognizer_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_recognizer(request) @@ -1238,26 +1239,28 @@ async def test_create_recognizer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_recognizer - ] = mock_object + ] = mock_rpc request = {} await client.create_recognizer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_recognizer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1637,22 +1640,23 @@ async def test_list_recognizers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recognizers - ] = mock_object + ] = mock_rpc request = {} await client.list_recognizers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recognizers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2222,22 +2226,23 @@ async def test_get_recognizer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recognizer - ] = mock_object + ] = mock_rpc request = {} await client.get_recognizer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recognizer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2552,8 +2557,9 @@ def test_update_recognizer_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_recognizer(request) @@ -2609,26 +2615,28 @@ async def test_update_recognizer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_recognizer - ] = mock_object + ] = mock_rpc request = {} await client.update_recognizer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_recognizer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2948,8 +2956,9 @@ def test_delete_recognizer_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_recognizer(request) @@ -3005,26 +3014,28 @@ async def test_delete_recognizer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_recognizer - ] = mock_object + ] = mock_rpc request = {} await client.delete_recognizer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_recognizer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3336,8 +3347,9 @@ def test_undelete_recognizer_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_recognizer(request) @@ -3393,26 +3405,28 @@ async def test_undelete_recognizer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_recognizer - ] = mock_object + ] = mock_rpc request = {} await client.undelete_recognizer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_recognizer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3763,22 +3777,23 @@ async def test_recognize_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recognize - ] = mock_object + ] = mock_rpc request = {} await client.recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4088,22 +4103,23 @@ async def test_streaming_recognize_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_recognize - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4254,8 +4270,9 @@ def test_batch_recognize_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_recognize(request) @@ -4309,26 +4326,28 @@ async def test_batch_recognize_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_recognize - ] = mock_object + ] = mock_rpc request = {} await client.batch_recognize(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_recognize(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4705,22 +4724,23 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_config - ] = mock_object + ] = mock_rpc request = {} await client.get_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5066,22 +5086,23 @@ async def test_update_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_config - ] = mock_object + ] = mock_rpc request = {} await client.update_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5394,8 +5415,9 @@ def test_create_custom_class_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_custom_class(request) @@ -5451,26 +5473,28 @@ async def test_create_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5860,22 +5884,23 @@ async def test_list_custom_classes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_classes - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_classes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_classes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6459,22 +6484,23 @@ async def test_get_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6787,8 +6813,9 @@ def test_update_custom_class_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_custom_class(request) @@ -6844,26 +6871,28 @@ async def test_update_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7185,8 +7214,9 @@ def test_delete_custom_class_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_custom_class(request) @@ -7242,26 +7272,28 @@ async def test_delete_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.delete_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7574,8 +7606,9 @@ def test_undelete_custom_class_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_custom_class(request) @@ -7631,26 +7664,28 @@ async def test_undelete_custom_class_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_custom_class - ] = mock_object + ] = mock_rpc request = {} await client.undelete_custom_class(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_custom_class(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7961,8 +7996,9 @@ def test_create_phrase_set_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_phrase_set(request) @@ -8018,26 +8054,28 @@ async def test_create_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.create_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8417,22 +8455,23 @@ async def test_list_phrase_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_phrase_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_phrase_sets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_phrase_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8999,22 +9038,23 @@ async def test_get_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.get_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9327,8 +9367,9 @@ def test_update_phrase_set_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_phrase_set(request) @@ -9384,26 +9425,28 @@ async def test_update_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.update_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9723,8 +9766,9 @@ def test_delete_phrase_set_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_phrase_set(request) @@ -9780,26 +9824,28 @@ async def test_delete_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10111,8 +10157,9 @@ def test_undelete_phrase_set_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_phrase_set(request) @@ -10168,26 +10215,28 @@ async def test_undelete_phrase_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_phrase_set - ] = mock_object + ] = mock_rpc request = {} await client.undelete_phrase_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_phrase_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py b/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py index 91c7bf21d6f7..558c8aab67c5 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.0.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py index 91c7bf21d6f7..558c8aab67c5 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.0.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py index 5dcb50766308..1861e68ff051 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StorageControlClient).get_transport_class, type(StorageControlClient) - ) + get_transport_class = StorageControlClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py index 70297122546b..0f48c56f6d55 100644 --- a/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py +++ b/packages/google-cloud-storage-control/google/cloud/storage_control_v2/services/storage_control/client.py @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[StorageControlTransport], Callable[..., StorageControlTransport] ] = ( - type(self).get_transport_class(transport) + StorageControlClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StorageControlTransport], transport) ) diff --git a/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json b/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json index 3c621e36c1a2..932c0e2b46a6 100644 --- a/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json +++ b/packages/google-cloud-storage-control/samples/generated_samples/snippet_metadata_google.storage.control.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-storage-control", - "version": "1.0.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py b/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py index 438f433729ea..c86f237a3bf9 100644 --- a/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py 
+++ b/packages/google-cloud-storage-control/tests/unit/gapic/storage_control_v2/test_storage_control.py @@ -1307,22 +1307,23 @@ async def test_create_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_folder - ] = mock_object + ] = mock_rpc request = {} await client.create_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1680,22 +1681,23 @@ async def test_delete_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_folder - ] = mock_object + ] = mock_rpc request = {} await client.delete_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2034,22 +2036,23 @@ async def test_get_folder_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_folder - ] = mock_object + ] = mock_rpc request = {} await client.get_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2380,22 +2383,23 @@ async def test_list_folders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_folders - ] = mock_object + ] = mock_rpc request = {} await client.list_folders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_folders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2863,8 +2867,9 @@ def test_rename_folder_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rename_folder(request) @@ -2925,26 +2930,28 @@ async def test_rename_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_folder - ] = mock_object + ] = mock_rpc request = {} await client.rename_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rename_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3316,22 +3323,23 @@ async def test_get_storage_layout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_storage_layout - ] = mock_object + ] = mock_rpc request = {} await client.get_storage_layout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_storage_layout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3707,22 +3715,23 @@ async def test_create_managed_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_managed_folder - ] = mock_object + ] = mock_rpc request = {} await client.create_managed_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_managed_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4102,22 +4111,23 @@ async def test_delete_managed_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_managed_folder - ] = mock_object + ] = mock_rpc request = {} await client.delete_managed_folder(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_managed_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4479,22 +4489,23 @@ async def test_get_managed_folder_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_managed_folder - ] = mock_object + ] = mock_rpc request = {} await client.get_managed_folder(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_managed_folder(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4866,22 +4877,23 @@ async def test_list_managed_folders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_managed_folders - ] = mock_object + ] = mock_rpc request = {} await client.list_managed_folders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_managed_folders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights/gapic_version.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-storageinsights/google/cloud/storageinsights/gapic_version.py +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_version.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_version.py +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/async_client.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/async_client.py index 48620a4953b2..71ad026871cd 100644 --- a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/async_client.py +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StorageInsightsClient).get_transport_class, type(StorageInsightsClient) - ) + get_transport_class = StorageInsightsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/client.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/client.py index a3b371c95536..0eafbaefbd3b 100644 --- a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/client.py +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[StorageInsightsTransport], Callable[..., StorageInsightsTransport] ] = ( - type(self).get_transport_class(transport) + StorageInsightsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StorageInsightsTransport], transport) ) diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json b/packages/google-cloud-storageinsights/samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json index 3733ab5a9833..7411c6d3c4bf 100644 --- a/packages/google-cloud-storageinsights/samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json +++ b/packages/google-cloud-storageinsights/samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-storageinsights", - "version": "0.1.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py index 426ad600db84..cec0f14f7216 100644 --- a/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py +++ b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py @@ -1333,22 +1333,23 @@ async def test_list_report_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_report_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_report_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_report_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1925,22 +1926,23 @@ async def test_get_report_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_report_config - ] = mock_object + ] = mock_rpc request = {} await client.get_report_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_report_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2318,22 +2320,23 @@ async def test_create_report_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_report_config - ] = mock_object + ] = mock_rpc request = {} await client.create_report_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_report_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2720,22 +2723,23 @@ async def test_update_report_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_report_config - ] = mock_object + ] = mock_rpc request = {} await client.update_report_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_report_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3114,22 +3118,23 @@ async def test_delete_report_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_report_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_report_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_report_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3501,22 +3506,23 @@ async def test_list_report_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_report_details - ] = mock_object + ] = mock_rpc request = {} await client.list_report_details(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_report_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4096,22 +4102,23 @@ async def test_get_report_detail_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_report_detail - ] = mock_object + ] = mock_rpc request = {} await client.get_report_detail(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_report_detail(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-support/google/cloud/support/gapic_version.py b/packages/google-cloud-support/google/cloud/support/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-cloud-support/google/cloud/support/gapic_version.py +++ b/packages/google-cloud-support/google/cloud/support/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-support/google/cloud/support_v2/gapic_version.py b/packages/google-cloud-support/google/cloud/support_v2/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-cloud-support/google/cloud/support_v2/gapic_version.py +++ b/packages/google-cloud-support/google/cloud/support_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/async_client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/async_client.py index 95bedd539f0c..5f657a9b63e6 100644 --- a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/async_client.py +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,10 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CaseAttachmentServiceClient).get_transport_class, - type(CaseAttachmentServiceClient), - ) + get_transport_class = CaseAttachmentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/client.py index a6557580f2d8..8cc8032bf194 100644 --- a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/client.py +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/client.py @@ -686,7 +686,7 @@ def __init__( Type[CaseAttachmentServiceTransport], Callable[..., CaseAttachmentServiceTransport], ] = ( - type(self).get_transport_class(transport) + CaseAttachmentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CaseAttachmentServiceTransport], transport) ) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/async_client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/async_client.py index ab67abb08dfc..e95129a5e2d5 100644 --- a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/async_client.py +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CaseServiceClient).get_transport_class, type(CaseServiceClient) - ) + get_transport_class = CaseServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/client.py index c5e7d8aad8f8..c77194b06421 100644 --- a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/client.py +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[CaseServiceTransport], Callable[..., CaseServiceTransport] ] = ( - type(self).get_transport_class(transport) + CaseServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CaseServiceTransport], transport) ) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/async_client.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/async_client.py index 8d9b4072cef6..55c550482af4 100644 --- a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/async_client.py +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CommentServiceClient).get_transport_class, type(CommentServiceClient) - ) + get_transport_class = CommentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/client.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/client.py index cdd4bf587f4b..0419d970a79b 100644 --- a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/client.py +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/client.py @@ -682,7 +682,7 @@ def __init__( transport_init: Union[ Type[CommentServiceTransport], Callable[..., CommentServiceTransport] ] = ( - type(self).get_transport_class(transport) + CommentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CommentServiceTransport], transport) ) diff --git a/packages/google-cloud-support/samples/generated_samples/snippet_metadata_google.cloud.support.v2.json b/packages/google-cloud-support/samples/generated_samples/snippet_metadata_google.cloud.support.v2.json index 4f98c4141b49..26f909a94299 100644 --- a/packages/google-cloud-support/samples/generated_samples/snippet_metadata_google.cloud.support.v2.json +++ b/packages/google-cloud-support/samples/generated_samples/snippet_metadata_google.cloud.support.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-support", - "version": "0.1.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py index 0e28bd0e8388..b6400dd48958 100644 --- a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py @@ -1355,22 +1355,23 @@ async def test_list_attachments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attachments - ] = mock_object + ] = mock_rpc request = {} await client.list_attachments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attachments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py index 9c8deb430884..6f0d24440bd9 100644 --- a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py @@ -1277,22 +1277,23 @@ async def test_get_case_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_case - ] = mock_object + ] = mock_rpc request = {} await client.get_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1659,22 +1660,23 @@ async def test_list_cases_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_cases - ] = mock_object + ] = mock_rpc request = {} await client.list_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2221,22 +2223,23 @@ async def test_search_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_cases - ] = mock_object + ] = mock_rpc request = {} await client.search_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2727,22 +2730,23 @@ async def test_create_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_case - ] = mock_object + ] = mock_rpc request = {} await client.create_case(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3143,22 +3147,23 @@ async def test_update_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_case - ] = mock_object + ] = mock_rpc request = {} await client.update_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3563,22 +3568,23 @@ async def test_escalate_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.escalate_case - ] = mock_object + ] = mock_rpc request = {} await client.escalate_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.escalate_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3891,22 +3897,23 @@ async def test_close_case_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.close_case - ] = mock_object + ] = mock_rpc request = {} await client.close_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.close_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4206,22 +4213,23 @@ async def test_search_case_classifications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_case_classifications - ] = mock_object + ] = mock_rpc request = {} await client.search_case_classifications(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_case_classifications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py index 0d065bbefedd..2a0cb8bbc3d5 100644 --- a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py @@ -1287,22 +1287,23 @@ async def test_list_comments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_comments - ] = mock_object + ] = mock_rpc request = {} await client.list_comments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_comments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1851,22 +1852,23 @@ async def test_create_comment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_comment - ] = mock_object + ] = mock_rpc request = {} await client.create_comment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/google/cloud/talent/gapic_version.py b/packages/google-cloud-talent/google/cloud/talent/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-talent/google/cloud/talent/gapic_version.py +++ b/packages/google-cloud-talent/google/cloud/talent/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/gapic_version.py b/packages/google-cloud-talent/google/cloud/talent_v4/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/gapic_version.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/async_client.py index f4823b0dd5e2..5e0325f7150d 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompanyServiceClient).get_transport_class, type(CompanyServiceClient) - ) + get_transport_class = CompanyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/client.py index ce5af4020d1e..caf3d1c008eb 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/company_service/client.py @@ -685,7 +685,7 @@ def __init__( transport_init: Union[ Type[CompanyServiceTransport], Callable[..., CompanyServiceTransport] ] = ( - type(self).get_transport_class(transport) + CompanyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompanyServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/async_client.py index cc5780da5073..09dcce998ba1 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -184,9 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionClient).get_transport_class, type(CompletionClient) - ) + get_transport_class = CompletionClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/client.py index 1dfc07d903f5..3c45d047571c 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/completion/client.py @@ -673,7 +673,7 @@ def __init__( transport_init: Union[ Type[CompletionTransport], Callable[..., CompletionTransport] ] = ( - type(self).get_transport_class(transport) + CompletionClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/async_client.py index 47de158968e2..f7365f87f862 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EventServiceClient).get_transport_class, type(EventServiceClient) - ) + get_transport_class = EventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/client.py index fc4f2a391aa9..69f9177e16be 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/event_service/client.py @@ -652,7 +652,7 @@ def __init__( transport_init: Union[ Type[EventServiceTransport], Callable[..., EventServiceTransport] ] = ( - type(self).get_transport_class(transport) + EventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EventServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/async_client.py index d7942490681a..495120429f3c 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(JobServiceClient).get_transport_class, type(JobServiceClient) - ) + get_transport_class = JobServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/client.py index d04eb8a3d637..728cfc148185 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/job_service/client.py @@ -705,7 +705,7 @@ def __init__( transport_init: Union[ Type[JobServiceTransport], Callable[..., JobServiceTransport] ] = ( - type(self).get_transport_class(transport) + JobServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., JobServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/async_client.py index 440ae0cfe43f..e1eff9dcb64d 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TenantServiceClient).get_transport_class, type(TenantServiceClient) - ) + get_transport_class = TenantServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/client.py index 1ccc22908d2d..f90d8952cdb6 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4/services/tenant_service/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[TenantServiceTransport], Callable[..., TenantServiceTransport] ] = ( - type(self).get_transport_class(transport) + TenantServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TenantServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/gapic_version.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/gapic_version.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/async_client.py index 88a89a3b4c51..680dde6dc17a 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompanyServiceClient).get_transport_class, type(CompanyServiceClient) - ) + get_transport_class = CompanyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/client.py index 8255c46c0b27..80a0137f387d 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/company_service/client.py @@ -667,7 +667,7 @@ def __init__( transport_init: Union[ Type[CompanyServiceTransport], Callable[..., CompanyServiceTransport] ] = ( - type(self).get_transport_class(transport) + CompanyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompanyServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/async_client.py index e62428f606d1..1b5618f57f91 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -182,9 +181,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionClient).get_transport_class, type(CompletionClient) - ) + get_transport_class = CompletionClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/client.py index bb1fcb4c0c0e..fa18c96ad60d 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/completion/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[CompletionTransport], Callable[..., CompletionTransport] ] = ( - type(self).get_transport_class(transport) + CompletionClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/async_client.py index d8d03e266547..e39fffd1edb6 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EventServiceClient).get_transport_class, type(EventServiceClient) - ) + get_transport_class = EventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/client.py index 1b27caac6ced..c64f0630bfb2 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/event_service/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[EventServiceTransport], Callable[..., EventServiceTransport] ] = ( - type(self).get_transport_class(transport) + EventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EventServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/async_client.py index 8126cff5a66c..9b0e8a01eedd 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(JobServiceClient).get_transport_class, type(JobServiceClient) - ) + get_transport_class = JobServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/client.py index c9d26f352ebe..f0ce5161c165 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/job_service/client.py @@ -687,7 +687,7 @@ def __init__( transport_init: Union[ Type[JobServiceTransport], Callable[..., JobServiceTransport] ] = ( - type(self).get_transport_class(transport) + JobServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., JobServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/async_client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/async_client.py index 121be59f434d..27fb1d0aada7 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/async_client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TenantServiceClient).get_transport_class, type(TenantServiceClient) - ) + get_transport_class = TenantServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/client.py b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/client.py index fb0f37bd2940..968e2f8997be 100644 --- a/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/client.py +++ b/packages/google-cloud-talent/google/cloud/talent_v4beta1/services/tenant_service/client.py @@ -659,7 +659,7 @@ def __init__( transport_init: Union[ Type[TenantServiceTransport], Callable[..., TenantServiceTransport] ] = ( - type(self).get_transport_class(transport) + TenantServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TenantServiceTransport], transport) ) diff --git a/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4.json b/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4.json index 2a562018e098..055c070bd80b 100644 --- a/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4.json +++ b/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-talent", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4beta1.json 
b/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4beta1.json index 4639c6d155f7..4ece637720f6 100644 --- a/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4beta1.json +++ b/packages/google-cloud-talent/samples/generated_samples/snippet_metadata_google.cloud.talent.v4beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-talent", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py index 3b77f6ebdf47..e41501b1fa83 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_company_service.py @@ -1330,22 +1330,23 @@ async def test_create_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_company - ] = mock_object + ] = mock_rpc request = {} await client.create_company(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1765,22 +1766,23 @@ async def test_get_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_company - ] = mock_object + ] = mock_rpc request = {} await client.get_company(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2186,22 +2188,23 @@ async def test_update_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_company - ] = mock_object + ] = mock_rpc request = {} await client.update_company(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2575,22 +2578,23 @@ async def test_delete_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_company - ] = mock_object + ] = mock_rpc request = {} await client.delete_company(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2932,22 +2936,23 @@ async def test_list_companies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_companies - ] = mock_object + ] = mock_rpc request = {} await client.list_companies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_companies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_completion.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_completion.py index 726aeb1ccd0a..6492cef6e739 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_completion.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_completion.py @@ -1242,22 +1242,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_event_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_event_service.py index 506466f716bf..4973d39ea973 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_event_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_event_service.py @@ -1280,22 +1280,23 @@ async def test_create_client_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_event - ] = mock_object + ] = mock_rpc request = {} await client.create_client_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_client_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py index 3aeb8cae2de7..3fdf07662597 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_job_service.py @@ -1316,22 +1316,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1674,8 +1675,9 @@ def test_batch_create_jobs_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_jobs(request) @@ -1731,26 +1733,28 @@ async def test_batch_create_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_jobs - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2168,22 +2172,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2612,22 +2617,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2970,8 +2976,9 @@ def test_batch_update_jobs_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_jobs(request) @@ -3027,26 +3034,28 @@ async def test_batch_update_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_jobs - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3403,22 +3412,23 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3710,8 +3720,9 @@ def test_batch_delete_jobs_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_jobs(request) @@ -3767,26 +3778,28 @@ async def test_batch_delete_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_jobs - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4154,22 +4167,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4733,22 +4747,23 @@ async def test_search_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_jobs - ] = mock_object + ] = mock_rpc request = {} await client.search_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5043,22 +5058,23 @@ async def test_search_jobs_for_alert_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_jobs_for_alert - ] = mock_object + ] = mock_rpc request = {} await client.search_jobs_for_alert(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_jobs_for_alert(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py index 77a566a36e7a..0439b7cdbf74 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4/test_tenant_service.py @@ -1287,22 +1287,23 @@ async def test_create_tenant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tenant - ] = mock_object + ] = mock_rpc request = {} await client.create_tenant(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1660,22 +1661,23 @@ async def test_get_tenant_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tenant - ] = mock_object + ] = mock_rpc request = {} await client.get_tenant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2021,22 +2023,23 @@ async def test_update_tenant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tenant - ] = mock_object + ] = mock_rpc request = {} await client.update_tenant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2386,22 +2389,23 @@ async def test_delete_tenant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tenant - ] = mock_object + ] = mock_rpc request = {} await client.delete_tenant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2743,22 +2747,23 @@ async def test_list_tenants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tenants - ] = mock_object + ] = mock_rpc request = {} await client.list_tenants(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tenants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py index 43d61c442768..8b2e2a6dd74f 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_company_service.py @@ -1330,22 +1330,23 @@ async def test_create_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_company - ] = mock_object + ] = mock_rpc request = {} await client.create_company(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1765,22 +1766,23 @@ async def test_get_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_company - ] = mock_object + ] = mock_rpc request = {} await client.get_company(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2186,22 +2188,23 @@ async def test_update_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_company - ] = mock_object + ] = mock_rpc request = {} await client.update_company(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2565,22 +2568,23 @@ async def test_delete_company_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_company - ] = mock_object + ] = mock_rpc request = {} await client.delete_company(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_company(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2922,22 +2926,23 @@ async def test_list_companies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_companies - ] = mock_object + ] = mock_rpc request = {} await client.list_companies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_companies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_completion.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_completion.py index 0435526f38fa..f89692371d4c 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_completion.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_completion.py @@ -1242,22 +1242,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_event_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_event_service.py index 8e8293ed6acd..0c2f18ede84f 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_event_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_event_service.py @@ -1280,22 +1280,23 @@ async def test_create_client_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_event - ] = mock_object + ] = mock_rpc request = {} await client.create_client_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_client_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py index a581bd6846e4..62222e8ff5ec 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_job_service.py @@ -1316,22 +1316,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1674,8 +1675,9 @@ def test_batch_create_jobs_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_jobs(request) @@ -1731,26 +1733,28 @@ async def test_batch_create_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_jobs - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2168,22 +2172,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2612,22 +2617,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2960,8 +2966,9 @@ def test_batch_update_jobs_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_jobs(request) @@ -3017,26 +3024,28 @@ async def test_batch_update_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_jobs - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3393,22 +3402,23 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3753,22 +3763,23 @@ async def test_batch_delete_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_jobs - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4130,22 +4141,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4711,22 +4723,23 @@ async def test_search_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_jobs - ] = mock_object + ] = mock_rpc request = {} await client.search_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5223,22 +5236,23 @@ async def test_search_jobs_for_alert_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_jobs_for_alert - ] = mock_object + ] = mock_rpc request = {} await client.search_jobs_for_alert(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_jobs_for_alert(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py index 8368758c6738..3b2fefe49b17 100644 --- a/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py +++ b/packages/google-cloud-talent/tests/unit/gapic/talent_v4beta1/test_tenant_service.py @@ -1299,22 +1299,23 @@ async def test_create_tenant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tenant - ] = mock_object + ] = mock_rpc request = {} await client.create_tenant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1692,22 +1693,23 @@ async def test_get_tenant_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tenant - ] = mock_object + ] = mock_rpc request = {} await client.get_tenant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2073,22 +2075,23 @@ async def test_update_tenant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tenant - ] = mock_object + ] = mock_rpc request = {} await client.update_tenant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2436,22 +2439,23 @@ async def test_delete_tenant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tenant - ] = mock_object + ] = mock_rpc request = {} await client.delete_tenant(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tenant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2793,22 +2797,23 @@ async def test_list_tenants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tenants - ] = mock_object + ] = mock_rpc request = {} await client.list_tenants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tenants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py index 8671082a1dc9..558c8aab67c5 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py +++ b/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py index 8671082a1dc9..558c8aab67c5 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/async_client.py index dca0a5325b1c..2d70ab1ecc51 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/async_client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudTasksClient).get_transport_class, type(CloudTasksClient) - ) + get_transport_class = CloudTasksClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/client.py index 2c3f7d8127b3..f34a12dcb2ff 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[CloudTasksTransport], Callable[..., CloudTasksTransport] ] = ( - type(self).get_transport_class(transport) + CloudTasksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudTasksTransport], transport) ) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py index 8671082a1dc9..558c8aab67c5 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py index 27d790256d2f..355e7f004079 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudTasksClient).get_transport_class, type(CloudTasksClient) - ) + get_transport_class = CloudTasksClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py index 0ee1e534faae..c642abf66eac 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[CloudTasksTransport], Callable[..., CloudTasksTransport] ] = ( - type(self).get_transport_class(transport) + CloudTasksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudTasksTransport], transport) ) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py index 8671082a1dc9..558c8aab67c5 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py index 32b2edc6e777..9984481c9e77 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudTasksClient).get_transport_class, type(CloudTasksClient) - ) + get_transport_class = CloudTasksClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py index ec031eca9d48..704bb7fcc660 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[CloudTasksTransport], Callable[..., CloudTasksTransport] ] = ( - type(self).get_transport_class(transport) + CloudTasksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudTasksTransport], transport) ) diff --git a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json index 2ba8b7bbcc9f..164c51d1def9 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json +++ b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tasks", - "version": "2.16.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json index 534001942715..673ddcbdda4a 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json +++ b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tasks", - "version": "2.16.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json index af0f55d860ba..788516efec3b 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json +++ b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tasks", - "version": "2.16.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py index a302361b6a8d..30ab342d8795 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py @@ -1259,22 +1259,23 @@ async def test_list_queues_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ client._client._transport.list_queues - ] = mock_object + ] = mock_rpc request = {} await client.list_queues(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_queues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1818,22 +1819,23 @@ async def test_get_queue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_queue - ] = mock_object + ] = mock_rpc request = {} await client.get_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2183,22 +2185,23 @@ async def test_create_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_queue - ] = mock_object + ] = mock_rpc request = {} await client.create_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2554,22 +2557,23 @@ async def test_update_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_queue - ] = mock_object + ] = mock_rpc request = {} await client.update_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2919,22 +2923,23 @@ async def test_delete_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_queue - ] = mock_object + ] = mock_rpc request = {} await client.delete_queue(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3277,22 +3282,23 @@ async def test_purge_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_queue - ] = mock_object + ] = mock_rpc request = {} await client.purge_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.purge_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3642,22 +3648,23 @@ async def test_pause_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_queue - ] = mock_object + ] = mock_rpc request = {} await client.pause_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4007,22 +4014,23 @@ async def test_resume_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_queue - ] = mock_object + ] = mock_rpc request = {} await client.resume_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4372,22 +4380,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4754,22 +4763,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5146,22 +5156,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5550,22 +5561,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6115,22 +6127,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6490,22 +6503,23 @@ async def test_create_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_task - ] = mock_object + ] = mock_rpc request = {} await client.create_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6859,22 +6873,23 @@ async def test_delete_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_task - ] = mock_object + ] = mock_rpc request = {} await client.delete_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7221,22 +7236,23 @@ async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_task - ] = mock_object + ] = mock_rpc request = {} await client.run_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py index 330d70e624e2..fb47db1becc9 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py @@ -1260,22 +1260,23 @@ async def test_list_queues_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_queues - ] = mock_object + ] = mock_rpc request = {} await client.list_queues(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_queues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1819,22 +1820,23 @@ async def test_get_queue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_queue - ] = mock_object + ] = mock_rpc request = {} await client.get_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2184,22 +2186,23 @@ async def test_create_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_queue - ] = mock_object + ] = mock_rpc request = {} await client.create_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2555,22 +2558,23 @@ async def test_update_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_queue - ] = mock_object + ] = mock_rpc request = {} await client.update_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2920,22 +2924,23 @@ async def test_delete_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_queue - ] = mock_object + ] = mock_rpc request = {} await client.delete_queue(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3278,22 +3283,23 @@ async def test_purge_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_queue - ] = mock_object + ] = mock_rpc request = {} await client.purge_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.purge_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3643,22 +3649,23 @@ async def test_pause_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_queue - ] = mock_object + ] = mock_rpc request = {} await client.pause_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4008,22 +4015,23 @@ async def test_resume_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_queue - ] = mock_object + ] = mock_rpc request = {} await client.resume_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4373,22 +4381,23 @@ async def test_upload_queue_yaml_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upload_queue_yaml - ] = mock_object + ] = mock_rpc request = {} await client.upload_queue_yaml(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.upload_queue_yaml(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4594,22 +4603,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4976,22 +4986,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5368,22 +5379,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5772,22 +5784,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6331,22 +6344,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6696,22 +6710,23 @@ async def test_create_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_task - ] = mock_object + ] = mock_rpc request = {} await client.create_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7061,22 +7076,23 @@ async def test_delete_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_task - ] = mock_object + ] = mock_rpc request = {} await client.delete_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7413,22 +7429,23 @@ async def test_lease_tasks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lease_tasks - ] = mock_object + ] = mock_rpc request = {} await client.lease_tasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lease_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7779,22 +7796,23 @@ async def test_acknowledge_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge_task - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8147,22 +8165,23 @@ async def test_renew_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.renew_lease - ] = mock_object + ] = mock_rpc request = {} await client.renew_lease(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.renew_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8532,22 +8551,23 @@ async def test_cancel_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_lease - ] = mock_object + ] = mock_rpc request = {} await client.cancel_lease(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8905,22 +8925,23 @@ async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_task - ] = mock_object + ] = mock_rpc request = {} await client.run_task(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py index 574752c5306d..b182a3858516 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py @@ -1259,22 +1259,23 @@ async def test_list_queues_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_queues - ] = mock_object + ] = mock_rpc request = {} await client.list_queues(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_queues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1821,22 +1822,23 @@ async def test_get_queue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_queue - ] = mock_object + ] = mock_rpc request = {} await client.get_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2191,22 +2193,23 @@ async def test_create_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_queue - ] = mock_object + ] = mock_rpc request = {} await client.create_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2567,22 +2570,23 @@ async def test_update_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_queue - ] = mock_object + ] = mock_rpc request = {} await client.update_queue(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2934,22 +2938,23 @@ async def test_delete_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_queue - ] = mock_object + ] = mock_rpc request = {} await client.delete_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3295,22 +3300,23 @@ async def test_purge_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_queue - ] = mock_object + ] = mock_rpc request = {} await client.purge_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.purge_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3665,22 +3671,23 @@ async def test_pause_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_queue - ] = mock_object + ] = mock_rpc request = {} await client.pause_queue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4035,22 +4042,23 @@ async def test_resume_queue_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_queue - ] = mock_object + ] = mock_rpc request = {} await client.resume_queue(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_queue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4402,22 +4410,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4784,22 +4793,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5176,22 +5186,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5580,22 +5591,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6145,22 +6157,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6520,22 +6533,23 @@ async def test_create_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_task - ] = mock_object + ] = mock_rpc request = {} await client.create_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6889,22 +6903,23 @@ async def test_delete_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_task - ] = mock_object + ] = mock_rpc request = {} await client.delete_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7251,22 +7266,23 @@ async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_task - ] = mock_object + ] = mock_rpc request = {} await client.run_task(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation/gapic_version.py b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation/gapic_version.py index c1954d3635eb..558c8aab67c5 100644 --- a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation/gapic_version.py +++ b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/gapic_version.py b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/gapic_version.py index c1954d3635eb..558c8aab67c5 100644 --- a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/gapic_version.py +++ b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/async_client.py b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/async_client.py index d1ea91d849b2..e291c7ac2ed0 100644 --- a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/async_client.py +++ b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -220,9 +219,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TelcoAutomationClient).get_transport_class, type(TelcoAutomationClient) - ) + get_transport_class = TelcoAutomationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/client.py b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/client.py index ee51e4f7f5c8..ced6bf29894d 100644 --- a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/client.py +++ b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1/services/telco_automation/client.py @@ -793,7 +793,7 @@ def __init__( transport_init: Union[ Type[TelcoAutomationTransport], Callable[..., TelcoAutomationTransport] ] = ( - type(self).get_transport_class(transport) + TelcoAutomationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TelcoAutomationTransport], transport) ) diff --git a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/gapic_version.py b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/gapic_version.py index c1954d3635eb..558c8aab67c5 100644 --- a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/gapic_version.py +++ b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/async_client.py b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/async_client.py index 2cfdcc20e3db..a5fe6cad5310 100644 --- a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/async_client.py +++ b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -220,9 +219,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TelcoAutomationClient).get_transport_class, type(TelcoAutomationClient) - ) + get_transport_class = TelcoAutomationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/client.py b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/client.py index ee3403d15b0d..401ed9b12f02 100644 --- a/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/client.py +++ b/packages/google-cloud-telcoautomation/google/cloud/telcoautomation_v1alpha1/services/telco_automation/client.py @@ -793,7 +793,7 @@ def __init__( transport_init: Union[ Type[TelcoAutomationTransport], Callable[..., TelcoAutomationTransport] ] = ( - type(self).get_transport_class(transport) + TelcoAutomationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TelcoAutomationTransport], transport) ) diff --git a/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1.json b/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1.json index a81b477a3400..1949e1042744 100644 --- a/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1.json +++ b/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-telcoautomation", - "version": "0.2.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1alpha1.json b/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1alpha1.json index d0e81ea55070..777445ea6e9a 100644 --- a/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1alpha1.json +++ b/packages/google-cloud-telcoautomation/samples/generated_samples/snippet_metadata_google.cloud.telcoautomation.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-telcoautomation", - "version": "0.2.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py index 6104c83ad775..a7dbf8da72e2 100644 --- a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py +++ b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1/test_telco_automation.py @@ -1341,22 +1341,23 @@ async def test_list_orchestration_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_orchestration_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_orchestration_clusters(request) 
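Both telcoautomation async clients drop the functools.partial indirection in favour of referencing TelcoAutomationClient.get_transport_class directly, and the synchronous client.py likewise swaps type(self).get_transport_class(transport) for the explicit class reference. The sketch below shows why the two spellings are interchangeable when get_transport_class is defined on the client's metaclass; ClientMeta and the registry contents are simplified stand-ins for the generated code, not its actual definitions.

# Old pattern binds the metaclass through functools.partial; new pattern
# relies on the metaclass method already being bound when accessed on the
# class, so both calls resolve the same transport.
import functools


class ClientMeta(type):
    _transport_registry = {"grpc": "GrpcTransport", "grpc_asyncio": "AsyncTransport"}

    def get_transport_class(cls, label="grpc"):
        return cls._transport_registry[label]


class TelcoAutomationClient(metaclass=ClientMeta):
    pass


old_style = functools.partial(
    type(TelcoAutomationClient).get_transport_class, type(TelcoAutomationClient)
)
new_style = TelcoAutomationClient.get_transport_class

assert old_style("grpc_asyncio") == new_style("grpc_asyncio") == "AsyncTransport"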
# Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_orchestration_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1943,22 +1944,23 @@ async def test_get_orchestration_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_orchestration_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_orchestration_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_orchestration_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2281,8 +2283,9 @@ def test_create_orchestration_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_orchestration_cluster(request) @@ -2338,26 +2341,28 @@ async def test_create_orchestration_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_orchestration_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_orchestration_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_orchestration_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2699,8 +2704,9 @@ def test_delete_orchestration_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_orchestration_cluster(request) @@ -2756,26 +2762,28 @@ async def test_delete_orchestration_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_orchestration_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_orchestration_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_orchestration_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3141,22 +3149,23 @@ async def test_list_edge_slms_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_edge_slms - ] = mock_object + ] = mock_rpc request = {} await client.list_edge_slms(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_edge_slms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3716,22 +3725,23 @@ async def test_get_edge_slm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_edge_slm - ] = mock_object + ] = mock_rpc request = {} await client.get_edge_slm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_edge_slm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4039,8 +4049,9 @@ def test_create_edge_slm_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_edge_slm(request) @@ -4094,26 +4105,28 @@ async def test_create_edge_slm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_edge_slm - ] = mock_object + ] = mock_rpc request = {} await client.create_edge_slm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_edge_slm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4425,8 +4438,9 @@ def test_delete_edge_slm_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_edge_slm(request) @@ -4480,26 +4494,28 @@ async def test_delete_edge_slm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_edge_slm - ] = mock_object + ] = mock_rpc request = {} await client.delete_edge_slm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_edge_slm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4873,22 +4889,23 @@ async def test_create_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.create_blueprint(request) # Establish that the underlying gRPC stub method was called. 
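The reworded comments in these operation-method tests describe the same caching behaviour each time: the first long-running-operation call builds client._transport.operations_client via wrapper_fn, and later calls reuse the cached instance. An illustrative sketch of that lazy-build-and-cache shape is below; FakeTransport and run_lro are hypothetical names standing in for the generated transport and operation methods, not the library's API.

# The operations client is built on first access and cached afterwards, so
# the builder runs exactly once across repeated operation calls.
from unittest import mock


class FakeTransport:
    def __init__(self, build_operations_client):
        self._build = build_operations_client
        self._operations_client = None

    @property
    def operations_client(self):
        if self._operations_client is None:
            self._operations_client = self._build()
        return self._operations_client


def run_lro(transport):
    # Stand-in for an operation method such as create_orchestration_cluster.
    return transport.operations_client


builder = mock.Mock(return_value=object())
transport = FakeTransport(builder)

run_lro(transport)
run_lro(transport)

assert builder.call_count == 1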
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5295,22 +5312,23 @@ async def test_update_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.update_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5709,22 +5727,23 @@ async def test_get_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.get_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6084,22 +6103,23 @@ async def test_delete_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.delete_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6443,22 +6463,23 @@ async def test_list_blueprints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_blueprints - ] = mock_object + ] = mock_rpc request = {} await client.list_blueprints(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_blueprints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7035,22 +7056,23 @@ async def test_approve_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.approve_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.approve_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7460,22 +7482,23 @@ async def test_propose_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.propose_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.propose_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.propose_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7877,22 +7900,23 @@ async def test_reject_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reject_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.reject_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reject_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8272,22 +8296,23 @@ async def test_list_blueprint_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_blueprint_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_blueprint_revisions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_blueprint_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8868,22 +8893,23 @@ async def test_search_blueprint_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_blueprint_revisions - ] = mock_object + ] = mock_rpc request = {} await client.search_blueprint_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_blueprint_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9474,22 +9500,23 @@ async def test_search_deployment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_deployment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.search_deployment_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_deployment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10071,22 +10098,23 @@ async def test_discard_blueprint_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discard_blueprint_changes - ] = mock_object + ] = mock_rpc request = {} await client.discard_blueprint_changes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discard_blueprint_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10458,22 +10486,23 @@ async def test_list_public_blueprints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_public_blueprints - ] = mock_object + ] = mock_rpc request = {} await client.list_public_blueprints(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_public_blueprints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11062,22 +11091,23 @@ async def test_get_public_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_public_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.get_public_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_public_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11486,22 +11516,23 @@ async def test_create_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11932,22 +11963,23 @@ async def test_update_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12362,22 +12394,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12747,22 +12780,23 @@ async def test_remove_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_deployment - ] = mock_object + ] = mock_rpc request = {} await client.remove_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13119,22 +13153,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13692,22 +13727,23 @@ async def test_list_deployment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_deployment_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14279,22 +14315,23 @@ async def test_discard_deployment_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discard_deployment_changes - ] = mock_object + ] = mock_rpc request = {} await client.discard_deployment_changes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discard_deployment_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14680,22 +14717,23 @@ async def test_apply_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_deployment - ] = mock_object + ] = mock_rpc request = {} await client.apply_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.apply_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15078,22 +15116,23 @@ async def test_compute_deployment_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_deployment_status - ] = mock_object + ] = mock_rpc request = {} await client.compute_deployment_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_deployment_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15496,22 +15535,23 @@ async def test_rollback_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_deployment - ] = mock_object + ] = mock_rpc request = {} await client.rollback_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15918,22 +15958,23 @@ async def test_get_hydrated_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_hydrated_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_hydrated_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_hydrated_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16312,22 +16353,23 @@ async def test_list_hydrated_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hydrated_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_hydrated_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hydrated_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16906,22 +16948,23 @@ async def test_update_hydrated_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_hydrated_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_hydrated_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_hydrated_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17314,22 +17357,23 @@ async def test_apply_hydrated_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_hydrated_deployment - ] = mock_object + ] = mock_rpc request = {} await client.apply_hydrated_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.apply_hydrated_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py index 27d1365791e9..3c693a1235d3 100644 --- a/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py +++ b/packages/google-cloud-telcoautomation/tests/unit/gapic/telcoautomation_v1alpha1/test_telco_automation.py @@ -1341,22 +1341,23 @@ async def test_list_orchestration_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_orchestration_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_orchestration_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_orchestration_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1943,22 +1944,23 @@ async def test_get_orchestration_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_orchestration_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_orchestration_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_orchestration_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2281,8 +2283,9 @@ def test_create_orchestration_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_orchestration_cluster(request) @@ -2338,26 +2341,28 @@ async def test_create_orchestration_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_orchestration_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_orchestration_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_orchestration_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2699,8 +2704,9 @@ def test_delete_orchestration_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_orchestration_cluster(request) @@ -2756,26 +2762,28 @@ async def test_delete_orchestration_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_orchestration_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_orchestration_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_orchestration_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3141,22 +3149,23 @@ async def test_list_edge_slms_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_edge_slms - ] = mock_object + ] = mock_rpc request = {} await client.list_edge_slms(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_edge_slms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3716,22 +3725,23 @@ async def test_get_edge_slm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_edge_slm - ] = mock_object + ] = mock_rpc request = {} await client.get_edge_slm(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_edge_slm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4039,8 +4049,9 @@ def test_create_edge_slm_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_edge_slm(request) @@ -4094,26 +4105,28 @@ async def test_create_edge_slm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_edge_slm - ] = mock_object + ] = mock_rpc request = {} await client.create_edge_slm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_edge_slm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4425,8 +4438,9 @@ def test_delete_edge_slm_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_edge_slm(request) @@ -4480,26 +4494,28 @@ async def test_delete_edge_slm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_edge_slm - ] = mock_object + ] = mock_rpc request = {} await client.delete_edge_slm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_edge_slm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4873,22 +4889,23 @@ async def test_create_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.create_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5295,22 +5312,23 @@ async def test_update_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.update_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5709,22 +5727,23 @@ async def test_get_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.get_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6084,22 +6103,23 @@ async def test_delete_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.delete_blueprint(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6443,22 +6463,23 @@ async def test_list_blueprints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_blueprints - ] = mock_object + ] = mock_rpc request = {} await client.list_blueprints(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_blueprints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7035,22 +7056,23 @@ async def test_approve_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.approve_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.approve_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7460,22 +7482,23 @@ async def test_propose_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.propose_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.propose_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.propose_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7877,22 +7900,23 @@ async def test_reject_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reject_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.reject_blueprint(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reject_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8272,22 +8296,23 @@ async def test_list_blueprint_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_blueprint_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_blueprint_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_blueprint_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8868,22 +8893,23 @@ async def test_search_blueprint_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_blueprint_revisions - ] = mock_object + ] = mock_rpc request = {} await client.search_blueprint_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_blueprint_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9474,22 +9500,23 @@ async def test_search_deployment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_deployment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.search_deployment_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_deployment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10071,22 +10098,23 @@ async def test_discard_blueprint_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discard_blueprint_changes - ] = mock_object + ] = mock_rpc request = {} await client.discard_blueprint_changes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discard_blueprint_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10458,22 +10486,23 @@ async def test_list_public_blueprints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_public_blueprints - ] = mock_object + ] = mock_rpc request = {} await client.list_public_blueprints(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_public_blueprints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11062,22 +11091,23 @@ async def test_get_public_blueprint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_public_blueprint - ] = mock_object + ] = mock_rpc request = {} await client.get_public_blueprint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_public_blueprint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11486,22 +11516,23 @@ async def test_create_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11932,22 +11963,23 @@ async def test_update_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12362,22 +12394,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12747,22 +12780,23 @@ async def test_remove_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_deployment - ] = mock_object + ] = mock_rpc request = {} await client.remove_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13119,22 +13153,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13692,22 +13727,23 @@ async def test_list_deployment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_deployment_revisions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14279,22 +14315,23 @@ async def test_discard_deployment_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discard_deployment_changes - ] = mock_object + ] = mock_rpc request = {} await client.discard_deployment_changes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discard_deployment_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14680,22 +14717,23 @@ async def test_apply_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_deployment - ] = mock_object + ] = mock_rpc request = {} await client.apply_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.apply_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15078,22 +15116,23 @@ async def test_compute_deployment_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_deployment_status - ] = mock_object + ] = mock_rpc request = {} await client.compute_deployment_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_deployment_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15496,22 +15535,23 @@ async def test_rollback_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_deployment - ] = mock_object + ] = mock_rpc request = {} await client.rollback_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15918,22 +15958,23 @@ async def test_get_hydrated_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_hydrated_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_hydrated_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_hydrated_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16312,22 +16353,23 @@ async def test_list_hydrated_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hydrated_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_hydrated_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hydrated_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16906,22 +16948,23 @@ async def test_update_hydrated_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_hydrated_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_hydrated_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_hydrated_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17314,22 +17357,23 @@ async def test_apply_hydrated_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_hydrated_deployment - ] = mock_object + ] = mock_rpc request = {} await client.apply_hydrated_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.apply_hydrated_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-tpu/google/cloud/tpu/gapic_version.py b/packages/google-cloud-tpu/google/cloud/tpu/gapic_version.py index d413e1807c55..558c8aab67c5 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu/gapic_version.py +++ b/packages/google-cloud-tpu/google/cloud/tpu/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v1/gapic_version.py b/packages/google-cloud-tpu/google/cloud/tpu_v1/gapic_version.py index d413e1807c55..558c8aab67c5 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v1/gapic_version.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/async_client.py b/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/async_client.py index 72f015014521..3506c89d6789 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/async_client.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TpuClient).get_transport_class, type(TpuClient) - ) + get_transport_class = TpuClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/client.py b/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/client.py index 8ac9023ac5af..c538712069f6 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/client.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v1/services/tpu/client.py @@ -704,7 +704,7 @@ def __init__( ) transport_init: Union[Type[TpuTransport], Callable[..., TpuTransport]] = ( - type(self).get_transport_class(transport) + TpuClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TpuTransport], transport) ) diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v2/gapic_version.py b/packages/google-cloud-tpu/google/cloud/tpu_v2/gapic_version.py index d413e1807c55..558c8aab67c5 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v2/gapic_version.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/async_client.py b/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/async_client.py index 613f850e346c..190fff995f77 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/async_client.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TpuClient).get_transport_class, type(TpuClient) - ) + get_transport_class = TpuClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/client.py b/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/client.py index a47126173b29..a4a0a29ab72e 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/client.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v2/services/tpu/client.py @@ -708,7 +708,7 @@ def __init__( ) transport_init: Union[Type[TpuTransport], Callable[..., TpuTransport]] = ( - type(self).get_transport_class(transport) + TpuClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TpuTransport], transport) ) diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/gapic_version.py b/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/gapic_version.py index d413e1807c55..558c8aab67c5 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/gapic_version.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/async_client.py b/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/async_client.py index 8df654861498..2ec3e4ee11f4 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/async_client.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TpuClient).get_transport_class, type(TpuClient) - ) + get_transport_class = TpuClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/client.py b/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/client.py index b54da5a0e2dd..82168d6e5bc5 100644 --- a/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/client.py +++ b/packages/google-cloud-tpu/google/cloud/tpu_v2alpha1/services/tpu/client.py @@ -728,7 +728,7 @@ def __init__( ) transport_init: Union[Type[TpuTransport], Callable[..., TpuTransport]] = ( - type(self).get_transport_class(transport) + TpuClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TpuTransport], transport) ) diff --git a/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v1.json b/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v1.json index 3f4f7b6a5eba..0a659a05f598 100644 --- a/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v1.json +++ b/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tpu", - "version": "1.18.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2.json b/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2.json index 7c8b0bfcec19..1b6219f228fc 100644 --- a/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2.json +++ b/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tpu", - "version": "1.18.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2alpha1.json b/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2alpha1.json index 624b859325a4..2ee393567ba2 100644 --- a/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2alpha1.json +++ b/packages/google-cloud-tpu/samples/generated_samples/snippet_metadata_google.cloud.tpu.v2alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tpu", - "version": "1.18.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py index a6dbc94ae5ce..a7d60c0f9a72 100644 --- a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py +++ b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v1/test_tpu.py @@ -1176,22 +1176,23 @@ async def test_list_nodes_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_nodes - ] = mock_object + ] = mock_rpc request = {} await client.list_nodes(request) # Establish that the underlying gRPC stub 
method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_nodes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1773,22 +1774,23 @@ async def test_get_node_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node - ] = mock_object + ] = mock_rpc request = {} await client.get_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2105,8 +2107,9 @@ def test_create_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_node(request) @@ -2160,26 +2163,28 @@ async def test_create_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node - ] = mock_object + ] = mock_rpc request = {} await client.create_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2489,8 +2494,9 @@ def test_delete_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_node(request) @@ -2544,26 +2550,28 @@ async def test_delete_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node - ] = mock_object + ] = mock_rpc request = {} await client.delete_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2855,8 +2863,9 @@ def test_reimage_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reimage_node(request) @@ -2910,26 +2919,28 @@ async def test_reimage_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reimage_node - ] = mock_object + ] = mock_rpc request = {} await client.reimage_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reimage_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,8 +3148,9 @@ def test_stop_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_node(request) @@ -3190,26 +3202,28 @@ async def test_stop_node_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_node - ] = mock_object + ] = mock_rpc request = {} await client.stop_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3417,8 +3431,9 @@ def test_start_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_node(request) @@ -3470,26 +3485,28 @@ async def test_start_node_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_node - ] = mock_object + ] = mock_rpc request = {} await client.start_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3775,22 +3792,23 @@ async def test_list_tensor_flow_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tensor_flow_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_tensor_flow_versions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tensor_flow_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4372,22 +4390,23 @@ async def test_get_tensor_flow_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tensor_flow_version - ] = mock_object + ] = mock_rpc request = {} await client.get_tensor_flow_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tensor_flow_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4770,22 +4789,23 @@ async def test_list_accelerator_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accelerator_types - ] = mock_object + ] = mock_rpc request = {} await client.list_accelerator_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accelerator_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5363,22 +5383,23 @@ async def test_get_accelerator_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_accelerator_type - ] = mock_object + ] = mock_rpc request = {} await client.get_accelerator_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_accelerator_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py index 52373572a9fd..62b1bf184f6c 100644 --- a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py +++ b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2/test_tpu.py @@ -1201,22 +1201,23 @@ async def test_list_nodes_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_nodes - ] = mock_object + ] = mock_rpc request = {} await client.list_nodes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_nodes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1795,22 +1796,23 @@ async def test_get_node_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node - ] = mock_object + ] = mock_rpc request = {} await client.get_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2125,8 +2127,9 @@ def test_create_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_node(request) @@ -2180,26 +2183,28 @@ async def test_create_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node - ] = mock_object + ] = mock_rpc request = {} await client.create_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2509,8 +2514,9 @@ def test_delete_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_node(request) @@ -2564,26 +2570,28 @@ async def test_delete_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node - ] = mock_object + ] = mock_rpc request = {} await client.delete_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2873,8 +2881,9 @@ def test_stop_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_node(request) @@ -2926,26 +2935,28 @@ async def test_stop_node_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_node - ] = mock_object + ] = mock_rpc request = {} await client.stop_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3153,8 +3164,9 @@ def test_start_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_node(request) @@ -3206,26 +3218,28 @@ async def test_start_node_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_node - ] = mock_object + ] = mock_rpc request = {} await client.start_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3429,8 +3443,9 @@ def test_update_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_node(request) @@ -3484,26 +3499,28 @@ async def test_update_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_node - ] = mock_object + ] = mock_rpc request = {} await client.update_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3867,22 +3884,23 @@ async def test_generate_service_identity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_service_identity - ] = mock_object + ] = mock_rpc request = {} await client.generate_service_identity(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_service_identity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4175,22 +4193,23 @@ async def test_list_accelerator_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accelerator_types - ] = mock_object + ] = mock_rpc request = {} await client.list_accelerator_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accelerator_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4768,22 +4787,23 @@ async def test_get_accelerator_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_accelerator_type - ] = mock_object + ] = mock_rpc request = {} await client.get_accelerator_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_accelerator_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5166,22 +5186,23 @@ async def test_list_runtime_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_runtime_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_runtime_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_runtime_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5759,22 +5780,23 @@ async def test_get_runtime_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_runtime_version - ] = mock_object + ] = mock_rpc request = {} await client.get_runtime_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_runtime_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6144,22 +6166,23 @@ async def test_get_guest_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_guest_attributes - ] = mock_object + ] = mock_rpc request = {} await client.get_guest_attributes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_guest_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py index 7be46f3ade3c..c4beb68a593b 100644 --- a/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py +++ b/packages/google-cloud-tpu/tests/unit/gapic/tpu_v2alpha1/test_tpu.py @@ -1182,22 +1182,23 @@ async def test_list_nodes_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_nodes - ] = mock_object + ] = mock_rpc request = {} await client.list_nodes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_nodes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1776,22 +1777,23 @@ async def test_get_node_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node - ] = mock_object + ] = mock_rpc request = {} await client.get_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2108,8 +2110,9 @@ def test_create_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_node(request) @@ -2163,26 +2166,28 @@ async def test_create_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node - ] = mock_object + ] = mock_rpc request = {} await client.create_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2494,8 +2499,9 @@ def test_delete_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_node(request) @@ -2549,26 +2555,28 @@ async def test_delete_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node - ] = mock_object + ] = mock_rpc request = {} await client.delete_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2858,8 +2866,9 @@ def test_stop_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_node(request) @@ -2911,26 +2920,28 @@ async def test_stop_node_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_node - ] = mock_object + ] = mock_rpc request = {} await client.stop_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3138,8 +3149,9 @@ def test_start_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_node(request) @@ -3191,26 +3203,28 @@ async def test_start_node_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_node - ] = mock_object + ] = mock_rpc request = {} await client.start_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3414,8 +3428,9 @@ def test_update_node_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_node(request) @@ -3469,26 +3484,28 @@ async def test_update_node_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_node - ] = mock_object + ] = mock_rpc request = {} await client.update_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3862,22 +3879,23 @@ async def test_list_queued_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_queued_resources - ] = mock_object + ] = mock_rpc request = {} await client.list_queued_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_queued_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4455,22 +4473,23 @@ async def test_get_queued_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_queued_resource - ] = mock_object + ] = mock_rpc request = {} await client.get_queued_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_queued_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4790,8 +4809,9 @@ def test_create_queued_resource_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_queued_resource(request) @@ -4847,26 +4867,28 @@ async def test_create_queued_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_queued_resource - ] = mock_object + ] = mock_rpc request = {} await client.create_queued_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_queued_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5199,8 +5221,9 @@ def test_delete_queued_resource_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_queued_resource(request) @@ -5256,26 +5279,28 @@ async def test_delete_queued_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_queued_resource - ] = mock_object + ] = mock_rpc request = {} await client.delete_queued_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_queued_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5586,8 +5611,9 @@ def test_reset_queued_resource_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_queued_resource(request) @@ -5643,26 +5669,28 @@ async def test_reset_queued_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_queued_resource - ] = mock_object + ] = mock_rpc request = {} await client.reset_queued_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_queued_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6026,22 +6054,23 @@ async def test_generate_service_identity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_service_identity - ] = mock_object + ] = mock_rpc request = {} await client.generate_service_identity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_service_identity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6334,22 +6363,23 @@ async def test_list_accelerator_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accelerator_types - ] = mock_object + ] = mock_rpc request = {} await client.list_accelerator_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accelerator_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6927,22 +6957,23 @@ async def test_get_accelerator_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_accelerator_type - ] = mock_object + ] = mock_rpc request = {} await client.get_accelerator_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_accelerator_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7325,22 +7356,23 @@ async def test_list_runtime_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_runtime_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_runtime_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_runtime_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7918,22 +7950,23 @@ async def test_get_runtime_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_runtime_version - ] = mock_object + ] = mock_rpc request = {} await client.get_runtime_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_runtime_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8303,22 +8336,23 @@ async def test_get_guest_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_guest_attributes - ] = mock_object + ] = mock_rpc request = {} await client.get_guest_attributes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_guest_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8543,8 +8577,9 @@ def test_simulate_maintenance_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.simulate_maintenance_event(request) @@ -8600,26 +8635,28 @@ async def test_simulate_maintenance_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.simulate_maintenance_event - ] = mock_object + ] = mock_rpc request = {} await client.simulate_maintenance_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.simulate_maintenance_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From a6b8f141e50d77b450bd5cfa4b83371de48bda94 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:58:26 -0400 Subject: [PATCH 016/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#13000) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWxpZmUtc2NpZW5jZXMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1hbmFnZWQtaWRlbnRpdGllcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1hbmFnZWRrYWZrYS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1lZGlhLXRyYW5zbGF0aW9uLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1lbWNhY2hlLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1pZ3JhdGlvbmNlbnRlci8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1vbml0b3JpbmctZGFzaGJvYXJkcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1vbml0b3JpbmctbWV0cmljcy1zY29wZXMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1vbml0b3JpbmcvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5ldGFwcC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5ldHdvcmstY29ubmVjdGl2aXR5Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: 
eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5ldHdvcmstbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5ldHdvcmstc2VjdXJpdHkvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5ldHdvcmstc2VydmljZXMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5vdGVib29rcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9wdGltaXphdGlvbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yY2hlc3RyYXRpb24tYWlyZmxvdy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9zLWNvbmZpZy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9zLWxvZ2luLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../cloud/lifesciences/gapic_version.py | 2 +- .../lifesciences_v2beta/gapic_version.py | 2 +- .../workflows_service_v2_beta/async_client.py | 6 +- .../workflows_service_v2_beta/client.py | 2 +- ...data_google.cloud.lifesciences.v2beta.json | 2 +- .../test_workflows_service_v2_beta.py | 19 +- .../cloud/managedidentities/gapic_version.py | 2 +- .../managedidentities_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../managed_identities_service/client.py | 2 +- ...ata_google.cloud.managedidentities.v1.json | 2 +- .../test_managed_identities_service.py | 160 ++-- .../cloud/mediatranslation/gapic_version.py | 2 +- .../mediatranslation_v1beta1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../speech_translation_service/client.py | 2 +- ...google.cloud.mediatranslation.v1beta1.json | 2 +- .../test_speech_translation_service.py | 9 +- .../google/cloud/memcache/gapic_version.py | 2 +- .../google/cloud/memcache_v1/gapic_version.py | 2 +- .../services/cloud_memcache/async_client.py | 5 +- .../services/cloud_memcache/client.py | 2 +- .../cloud/memcache_v1beta2/gapic_version.py | 2 +- .../services/cloud_memcache/async_client.py | 5 +- .../services/cloud_memcache/client.py | 2 +- ...pet_metadata_google.cloud.memcache.v1.json | 2 +- ...etadata_google.cloud.memcache.v1beta2.json | 2 +- .../gapic/memcache_v1/test_cloud_memcache.py | 132 +-- .../memcache_v1beta2/test_cloud_memcache.py | 151 ++-- .../cloud/migrationcenter/gapic_version.py | 2 +- .../cloud/migrationcenter_v1/gapic_version.py | 2 +- .../services/migration_center/async_client.py | 5 +- .../services/migration_center/client.py | 2 +- ...adata_google.cloud.migrationcenter.v1.json | 2 +- .../test_migration_center.py | 662 ++++++++------- .../monitoring_dashboard/gapic_version.py | 2 +- .../monitoring_dashboard_v1/gapic_version.py | 2 +- .../dashboards_service/async_client.py | 5 +- .../services/dashboards_service/client.py | 2 +- ...tadata_google.monitoring.dashboard.v1.json | 2 +- .../test_dashboards_service.py | 45 +- .../monitoring_metrics_scope/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../services/metrics_scopes/async_client.py | 5 
+- .../services/metrics_scopes/client.py | 2 +- ...ata_google.monitoring.metricsscope.v1.json | 2 +- .../test_metrics_scopes.py | 56 +- .../google/cloud/monitoring/gapic_version.py | 2 +- .../cloud/monitoring_v3/gapic_version.py | 2 +- .../alert_policy_service/async_client.py | 6 +- .../services/alert_policy_service/client.py | 2 +- .../services/group_service/async_client.py | 5 +- .../services/group_service/client.py | 2 +- .../services/metric_service/async_client.py | 5 +- .../services/metric_service/client.py | 2 +- .../async_client.py | 6 +- .../notification_channel_service/client.py | 2 +- .../services/query_service/async_client.py | 5 +- .../services/query_service/client.py | 2 +- .../async_client.py | 6 +- .../service_monitoring_service/client.py | 2 +- .../services/snooze_service/async_client.py | 5 +- .../services/snooze_service/client.py | 2 +- .../uptime_check_service/async_client.py | 6 +- .../services/uptime_check_service/client.py | 2 +- ...snippet_metadata_google.monitoring.v3.json | 2 +- .../test_alert_policy_service.py | 45 +- .../gapic/monitoring_v3/test_group_service.py | 54 +- .../monitoring_v3/test_metric_service.py | 81 +- .../test_notification_channel_service.py | 90 +- .../gapic/monitoring_v3/test_query_service.py | 9 +- .../test_service_monitoring_service.py | 90 +- .../monitoring_v3/test_snooze_service.py | 36 +- .../test_uptime_check_service.py | 54 +- .../google/cloud/netapp/gapic_version.py | 2 +- .../google/cloud/netapp_v1/gapic_version.py | 2 +- .../services/net_app/async_client.py | 5 +- .../netapp_v1/services/net_app/client.py | 2 +- ...ippet_metadata_google.cloud.netapp.v1.json | 2 +- .../unit/gapic/netapp_v1/test_net_app.py | 779 ++++++++++-------- .../networkconnectivity/gapic_version.py | 2 +- .../networkconnectivity_v1/gapic_version.py | 2 +- .../services/hub_service/async_client.py | 5 +- .../services/hub_service/client.py | 2 +- .../async_client.py | 6 +- .../policy_based_routing_service/client.py | 2 +- .../gapic_version.py | 2 +- .../services/hub_service/async_client.py | 5 +- .../services/hub_service/client.py | 2 +- ...a_google.cloud.networkconnectivity.v1.json | 2 +- ...le.cloud.networkconnectivity.v1alpha1.json | 2 +- .../test_hub_service.py | 251 +++--- .../test_policy_based_routing_service.py | 56 +- .../test_hub_service.py | 150 ++-- .../cloud/network_management/gapic_version.py | 2 +- .../network_management_v1/gapic_version.py | 2 +- .../reachability_service/async_client.py | 6 +- .../services/reachability_service/client.py | 2 +- .../types/connectivity_test.py | 2 +- ...ata_google.cloud.networkmanagement.v1.json | 2 +- .../test_reachability_service.py | 94 ++- .../cloud/network_security/gapic_version.py | 2 +- .../network_security_v1/gapic_version.py | 2 +- .../services/network_security/async_client.py | 5 +- .../services/network_security/client.py | 2 +- .../network_security_v1beta1/gapic_version.py | 2 +- .../services/network_security/async_client.py | 5 +- .../services/network_security/client.py | 2 +- ...adata_google.cloud.networksecurity.v1.json | 2 +- ..._google.cloud.networksecurity.v1beta1.json | 2 +- .../test_network_security.py | 225 ++--- .../test_network_security.py | 225 ++--- .../cloud/network_services/gapic_version.py | 2 +- .../network_services_v1/gapic_version.py | 2 +- .../services/dep_service/async_client.py | 5 +- .../services/dep_service/client.py | 2 +- .../services/network_services/async_client.py | 5 +- .../services/network_services/client.py | 2 +- ...adata_google.cloud.networkservices.v1.json | 2 +- 
.../network_services_v1/test_dep_service.py | 150 ++-- .../test_network_services.py | 581 +++++++------ .../google/cloud/notebooks/gapic_version.py | 2 +- .../cloud/notebooks_v1/gapic_version.py | 2 +- .../managed_notebook_service/async_client.py | 6 +- .../managed_notebook_service/client.py | 2 +- .../services/notebook_service/async_client.py | 5 +- .../services/notebook_service/client.py | 2 +- .../cloud/notebooks_v1beta1/gapic_version.py | 2 +- .../services/notebook_service/async_client.py | 5 +- .../services/notebook_service/client.py | 2 +- .../cloud/notebooks_v2/gapic_version.py | 2 +- .../services/notebook_service/async_client.py | 5 +- .../services/notebook_service/client.py | 2 +- ...et_metadata_google.cloud.notebooks.v1.json | 2 +- ...tadata_google.cloud.notebooks.v1beta1.json | 2 +- ...et_metadata_google.cloud.notebooks.v2.json | 2 +- .../test_managed_notebook_service.py | 217 ++--- .../notebooks_v1/test_notebook_service.py | 536 +++++++----- .../test_notebook_service.py | 311 ++++--- .../notebooks_v2/test_notebook_service.py | 198 +++-- .../cloud/optimization/gapic_version.py | 2 +- .../cloud/optimization_v1/gapic_version.py | 2 +- .../services/fleet_routing/async_client.py | 5 +- .../services/fleet_routing/client.py | 2 +- ...metadata_google.cloud.optimization.v1.json | 2 +- .../optimization_v1/test_fleet_routing.py | 28 +- .../airflow/service/gapic_version.py | 2 +- .../airflow/service_v1/gapic_version.py | 2 +- .../services/environments/async_client.py | 5 +- .../services/environments/client.py | 2 +- .../services/image_versions/async_client.py | 5 +- .../services/image_versions/client.py | 2 +- .../airflow/service_v1beta1/gapic_version.py | 2 +- .../services/environments/async_client.py | 5 +- .../services/environments/client.py | 2 +- .../services/image_versions/async_client.py | 5 +- .../services/image_versions/client.py | 2 +- ...loud.orchestration.airflow.service.v1.json | 2 +- ...orchestration.airflow.service.v1beta1.json | 2 +- .../gapic/service_v1/test_environments.py | 267 +++--- .../gapic/service_v1/test_image_versions.py | 9 +- .../service_v1beta1/test_environments.py | 305 ++++--- .../service_v1beta1/test_image_versions.py | 9 +- .../google/cloud/osconfig/gapic_version.py | 2 +- .../google/cloud/osconfig_v1/gapic_version.py | 2 +- .../os_config_service/async_client.py | 5 +- .../services/os_config_service/client.py | 2 +- .../os_config_zonal_service/async_client.py | 6 +- .../os_config_zonal_service/client.py | 2 +- .../cloud/osconfig_v1alpha/gapic_version.py | 2 +- .../os_config_zonal_service/async_client.py | 6 +- .../os_config_zonal_service/client.py | 2 +- ...pet_metadata_google.cloud.osconfig.v1.json | 2 +- ...etadata_google.cloud.osconfig.v1alpha.json | 2 +- .../osconfig_v1/test_os_config_service.py | 108 +-- .../test_os_config_zonal_service.py | 138 ++-- .../test_os_config_zonal_service.py | 156 ++-- .../google/cloud/oslogin/gapic_version.py | 2 +- .../google/cloud/oslogin_v1/gapic_version.py | 2 +- .../services/os_login_service/async_client.py | 5 +- .../services/os_login_service/client.py | 2 +- ...ppet_metadata_google.cloud.oslogin.v1.json | 2 +- .../gapic/oslogin_v1/test_os_login_service.py | 63 +- .../cloud/parallelstore/gapic_version.py | 2 +- .../parallelstore_v1beta/gapic_version.py | 2 +- .../services/parallelstore/async_client.py | 5 +- .../services/parallelstore/client.py | 2 +- ...ata_google.cloud.parallelstore.v1beta.json | 2 +- .../test_parallelstore.py | 113 +-- 189 files changed, 3952 insertions(+), 3139 deletions(-) diff --git 
a/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py index 3ba7229813ef..558c8aab67c5 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py index 3ba7229813ef..558c8aab67c5 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/async_client.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/async_client.py index 6c4a2eddb507..3ab8a271703e 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/async_client.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WorkflowsServiceV2BetaClient).get_transport_class, - type(WorkflowsServiceV2BetaClient), - ) + get_transport_class = WorkflowsServiceV2BetaClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py index 8d4c175fa107..0bc017f134dc 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py @@ -651,7 +651,7 @@ def __init__( Type[WorkflowsServiceV2BetaTransport], Callable[..., WorkflowsServiceV2BetaTransport], ] = ( - type(self).get_transport_class(transport) + WorkflowsServiceV2BetaClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WorkflowsServiceV2BetaTransport], transport) ) diff --git a/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json b/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json index 8f18716f7c4b..a5f3a0632b0b 100644 --- a/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json +++ b/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-life-sciences", - "version": "0.9.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py b/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py index 5c7545c60c37..87abafc97406 100644 --- a/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py +++ b/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py @@ -1310,8 +1310,9 @@ def test_run_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_pipeline(request) @@ -1365,26 +1366,28 @@ async def test_run_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.run_pipeline(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py b/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py +++ b/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py +++ b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/async_client.py b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/async_client.py index 662a218da6b0..da11de28f0b8 100644 --- a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/async_client.py +++ b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -234,10 +233,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ManagedIdentitiesServiceClient).get_transport_class, - type(ManagedIdentitiesServiceClient), - ) + get_transport_class = ManagedIdentitiesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py index e3ea172cfb8e..f0ffa388e890 100644 --- a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py +++ b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py @@ -708,7 +708,7 @@ def __init__( Type[ManagedIdentitiesServiceTransport], Callable[..., ManagedIdentitiesServiceTransport], ] = ( - type(self).get_transport_class(transport) + ManagedIdentitiesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ManagedIdentitiesServiceTransport], transport) ) diff --git a/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json b/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json index f1277df59091..21e96cde5067 100644 --- a/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json +++ b/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-managed-identities", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py b/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py index 78d001a2ddd3..0c94f2731651 100644 --- a/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py +++ b/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py @@ -1279,8 +1279,9 @@ def test_create_microsoft_ad_domain_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_microsoft_ad_domain(request) @@ -1336,26 +1337,28 @@ async def test_create_microsoft_ad_domain_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_microsoft_ad_domain - ] = mock_object + ] = mock_rpc request = {} await client.create_microsoft_ad_domain(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_microsoft_ad_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1744,22 +1747,23 @@ async def test_reset_admin_password_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_admin_password - ] = mock_object + ] = mock_rpc request = {} await client.reset_admin_password(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reset_admin_password(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2128,22 +2132,23 @@ async def test_list_domains_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_domains - ] = mock_object + ] = mock_rpc request = {} await client.list_domains(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_domains(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2708,22 +2713,23 @@ async def test_get_domain_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_domain - ] = mock_object + ] = mock_rpc request = {} await client.get_domain(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3023,8 +3029,9 @@ def test_update_domain_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_domain(request) @@ -3078,26 +3085,28 @@ async def test_update_domain_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_domain - ] = mock_object + ] = mock_rpc request = {} await client.update_domain(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3398,8 +3407,9 @@ def test_delete_domain_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_domain(request) @@ -3453,26 +3463,28 @@ async def test_delete_domain_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_domain - ] = mock_object + ] = mock_rpc request = {} await client.delete_domain(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3763,8 +3775,9 @@ def test_attach_trust_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.attach_trust(request) @@ -3818,26 +3831,28 @@ async def test_attach_trust_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.attach_trust - ] = mock_object + ] = mock_rpc request = {} await client.attach_trust(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.attach_trust(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4148,8 +4163,9 @@ def test_reconfigure_trust_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reconfigure_trust(request) @@ -4205,26 +4221,28 @@ async def test_reconfigure_trust_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reconfigure_trust - ] = mock_object + ] = mock_rpc request = {} await client.reconfigure_trust(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reconfigure_trust(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4545,8 +4563,9 @@ def test_detach_trust_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.detach_trust(request) @@ -4600,26 +4619,28 @@ async def test_detach_trust_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detach_trust - ] = mock_object + ] = mock_rpc request = {} await client.detach_trust(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.detach_trust(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4920,8 +4941,9 @@ def test_validate_trust_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.validate_trust(request) @@ -4975,26 +4997,28 @@ async def test_validate_trust_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_trust - ] = mock_object + ] = mock_rpc request = {} await client.validate_trust(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.validate_trust(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py b/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py index 11e34cec2824..558c8aab67c5 100644 --- a/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py +++ b/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py index 11e34cec2824..558c8aab67c5 100644 --- a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py +++ b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/async_client.py b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/async_client.py index 316b6f28b8f5..fd94e8a598fc 100644 --- a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/async_client.py +++ b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -195,10 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpeechTranslationServiceClient).get_transport_class, - type(SpeechTranslationServiceClient), - ) + get_transport_class = SpeechTranslationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py index 085308e35f60..b615166192b1 100644 --- a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py +++ b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[SpeechTranslationServiceTransport], Callable[..., SpeechTranslationServiceTransport], ] = ( - type(self).get_transport_class(transport) + SpeechTranslationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpeechTranslationServiceTransport], transport) ) diff --git a/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json b/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json index 0465b887d386..53086cedf83d 100644 --- a/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json +++ b/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-media-translation", - "version": "0.11.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py b/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py index 7dee70a0a728..20405c8cafe2 100644 --- a/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py +++ b/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py @@ -1244,22 +1244,23 @@ async def test_streaming_translate_speech_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_translate_speech - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_translate_speech(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_translate_speech(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py b/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py +++ b/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py b/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 18f85b0e0232..e14be166e27d 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,9 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudMemcacheClient).get_transport_class, type(CloudMemcacheClient) - ) + get_transport_class = CloudMemcacheClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py index 344bddaa9449..d1de2435f3db 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -685,7 +685,7 @@ def __init__( transport_init: Union[ Type[CloudMemcacheTransport], Callable[..., CloudMemcacheTransport] ] = ( - type(self).get_transport_class(transport) + CloudMemcacheClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudMemcacheTransport], transport) ) diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 55f66f3636f9..dbb25583ec25 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,9 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudMemcacheClient).get_transport_class, type(CloudMemcacheClient) - ) + get_transport_class = CloudMemcacheClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 5c2b2d7ec4b3..830813728ed0 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -685,7 +685,7 @@ def __init__( transport_init: Union[ Type[CloudMemcacheTransport], Callable[..., CloudMemcacheTransport] ] = ( - type(self).get_transport_class(transport) + CloudMemcacheClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudMemcacheTransport], transport) ) diff --git a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 502fadd79989..4fbe6f0f3a20 100644 --- a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index 4b235082a19d..a19a59a135cf 100644 --- a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 0a1a292db2b1..638e4aa6ad47 100644 --- a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -1303,22 +1303,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1887,22 +1888,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2213,8 +2215,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2268,26 +2271,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2593,8 +2598,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2648,26 +2654,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2975,8 +2983,9 @@ def test_update_parameters_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_parameters(request) @@ -3032,26 +3041,28 @@ async def test_update_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_parameters - ] = mock_object + ] = mock_rpc request = {} await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3371,8 +3382,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3426,26 +3438,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3737,8 +3751,9 @@ def test_apply_parameters_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.apply_parameters(request) @@ -3792,26 +3807,28 @@ async def test_apply_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_parameters - ] = mock_object + ] = mock_rpc request = {} await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.apply_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4132,8 +4149,9 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reschedule_maintenance(request) @@ -4189,26 +4207,28 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reschedule_maintenance - ] = mock_object + ] = mock_rpc request = {} await client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reschedule_maintenance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 59df4eb21979..6f3f5197d726 100644 --- a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -1303,22 +1303,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1890,22 +1891,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2218,8 +2220,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2273,26 +2276,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2598,8 +2603,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2653,26 +2659,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2980,8 +2988,9 @@ def test_update_parameters_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_parameters(request) @@ -3037,26 +3046,28 @@ async def test_update_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_parameters - ] = mock_object + ] = mock_rpc request = {} await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3376,8 +3387,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3431,26 +3443,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3742,8 +3756,9 @@ def test_apply_parameters_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.apply_parameters(request) @@ -3797,26 +3812,28 @@ async def test_apply_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_parameters - ] = mock_object + ] = mock_rpc request = {} await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.apply_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4137,8 +4154,9 @@ def test_apply_software_update_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.apply_software_update(request) @@ -4194,26 +4212,28 @@ async def test_apply_software_update_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_software_update - ] = mock_object + ] = mock_rpc request = {} await client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.apply_software_update(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4545,8 +4565,9 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reschedule_maintenance(request) @@ -4602,26 +4623,28 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reschedule_maintenance - ] = mock_object + ] = mock_rpc request = {} await client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reschedule_maintenance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/async_client.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/async_client.py index 4a6284e6d847..cbe47cf4c966 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/async_client.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MigrationCenterClient).get_transport_class, type(MigrationCenterClient) - ) + get_transport_class = MigrationCenterClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py index 9c0b3dcdf94b..90590157af59 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py @@ -871,7 +871,7 @@ def __init__( transport_init: Union[ Type[MigrationCenterTransport], Callable[..., MigrationCenterTransport] ] = ( - type(self).get_transport_class(transport) + MigrationCenterClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MigrationCenterTransport], transport) ) diff --git a/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json b/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json index 337e07e80d87..3fc0956a4d6c 100644 --- a/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json +++ b/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-migrationcenter", - "version": "0.1.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py b/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py index 4dee0c80f9bf..ee6b321140d8 100644 --- a/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py +++ b/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py @@ -1330,22 +1330,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1894,22 +1895,23 @@ async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_asset - ] = mock_object + ] = mock_rpc request = {} await client.get_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2268,22 +2270,23 @@ async def test_update_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_asset - ] = mock_object + ] = mock_rpc request = {} await client.update_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2653,22 +2656,23 @@ async def test_batch_update_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_assets - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3054,22 +3058,23 @@ async def test_delete_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_asset - ] = mock_object + ] = mock_rpc request = {} await client.delete_asset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3414,22 +3419,23 @@ async def test_batch_delete_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_assets - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3799,22 +3805,23 @@ async def test_report_asset_frames_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_asset_frames - ] = mock_object + ] = mock_rpc request = {} await client.report_asset_frames(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.report_asset_frames(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4095,22 +4102,23 @@ async def test_aggregate_assets_values_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.aggregate_assets_values - ] = mock_object + ] = mock_rpc request = {} await client.aggregate_assets_values(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.aggregate_assets_values(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4337,8 +4345,9 @@ def test_create_import_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_import_job(request) @@ -4394,26 +4403,28 @@ async def test_create_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_import_job - ] = mock_object + ] = mock_rpc request = {} await client.create_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4800,22 +4811,23 @@ async def test_list_import_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_import_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_import_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_import_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5372,22 +5384,23 @@ async def test_get_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_import_job - ] = mock_object + ] = mock_rpc request = {} await client.get_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5699,8 +5712,9 @@ def test_delete_import_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_import_job(request) @@ -5756,26 +5770,28 @@ async def test_delete_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_import_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6083,8 +6099,9 @@ def test_update_import_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_import_job(request) @@ -6140,26 +6157,28 @@ async def test_update_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_import_job - ] = mock_object + ] = mock_rpc request = {} await client.update_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6481,8 +6500,9 @@ def test_validate_import_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.validate_import_job(request) @@ -6538,26 +6558,28 @@ async def test_validate_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_import_job - ] = mock_object + ] = mock_rpc request = {} await client.validate_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.validate_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6860,8 +6882,9 @@ def test_run_import_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_import_job(request) @@ -6915,26 +6938,28 @@ async def test_run_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_import_job - ] = mock_object + ] = mock_rpc request = {} await client.run_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7304,22 +7329,23 @@ async def test_get_import_data_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_import_data_file - ] = mock_object + ] = mock_rpc request = {} await client.get_import_data_file(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_import_data_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7710,22 +7736,23 @@ async def test_list_import_data_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_import_data_files - ] = mock_object + ] = mock_rpc request = {} await client.list_import_data_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_import_data_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8248,8 +8275,9 @@ def test_create_import_data_file_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_import_data_file(request) @@ -8305,26 +8333,28 @@ async def test_create_import_data_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_import_data_file - ] = mock_object + ] = mock_rpc request = {} await client.create_import_data_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_import_data_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8658,8 +8688,9 @@ def test_delete_import_data_file_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_import_data_file(request) @@ -8715,26 +8746,28 @@ async def test_delete_import_data_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_import_data_file - ] = mock_object + ] = mock_rpc request = {} await client.delete_import_data_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_import_data_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9100,22 +9133,23 @@ async def test_list_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9664,22 +9698,23 @@ async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_group - ] = mock_object + ] = mock_rpc request = {} await client.get_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9980,8 +10015,9 @@ def test_create_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_group(request) @@ -10035,26 +10071,28 @@ async def test_create_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_group - ] = mock_object + ] = mock_rpc request = {} await client.create_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10364,8 +10402,9 @@ def test_update_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_group(request) @@ -10419,26 +10458,28 @@ async def test_update_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_group - ] = mock_object + ] = mock_rpc request = {} await client.update_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10740,8 +10781,9 @@ def test_delete_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_group(request) @@ -10795,26 +10837,28 @@ async def test_delete_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11116,8 +11160,9 @@ def test_add_assets_to_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_assets_to_group(request) @@ -11173,26 +11218,28 @@ async def test_add_assets_to_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_assets_to_group - ] = mock_object + ] = mock_rpc request = {} await client.add_assets_to_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_assets_to_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11506,8 +11553,9 @@ def test_remove_assets_from_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_assets_from_group(request) @@ -11563,26 +11611,28 @@ async def test_remove_assets_from_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_assets_from_group - ] = mock_object + ] = mock_rpc request = {} await client.remove_assets_from_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_assets_from_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11954,22 +12004,23 @@ async def test_list_error_frames_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_error_frames - ] = mock_object + ] = mock_rpc request = {} await client.list_error_frames(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_error_frames(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12532,22 +12583,23 @@ async def test_get_error_frame_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_error_frame - ] = mock_object + ] = mock_rpc request = {} await client.get_error_frame(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_error_frame(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12905,22 +12957,23 @@ async def test_list_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_sources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13487,22 +13540,23 @@ async def test_get_source_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_source - ] = mock_object + ] = mock_rpc request = {} await client.get_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13815,8 +13869,9 @@ def test_create_source_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_source(request) @@ -13870,26 +13925,28 @@ async def test_create_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_source - ] = mock_object + ] = mock_rpc request = {} await client.create_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14199,8 +14256,9 @@ def test_update_source_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_source(request) @@ -14254,26 +14312,28 @@ async def test_update_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_source - ] = mock_object + ] = mock_rpc request = {} await client.update_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14575,8 +14635,9 @@ def test_delete_source_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_source(request) @@ -14630,26 +14691,28 @@ async def test_delete_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_source - ] = mock_object + ] = mock_rpc request = {} await client.delete_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15014,22 +15077,23 @@ async def test_list_preference_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_preference_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_preference_sets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_preference_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15611,22 +15675,23 @@ async def test_get_preference_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_preference_set - ] = mock_object + ] = mock_rpc request = {} await client.get_preference_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_preference_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15949,8 +16014,9 @@ def test_create_preference_set_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_preference_set(request) @@ -16006,26 +16072,28 @@ async def test_create_preference_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_preference_set - ] = mock_object + ] = mock_rpc request = {} await client.create_preference_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_preference_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16357,8 +16425,9 @@ def test_update_preference_set_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_preference_set(request) @@ -16414,26 +16483,28 @@ async def test_update_preference_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_preference_set - ] = mock_object + ] = mock_rpc request = {} await client.update_preference_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_preference_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16757,8 +16828,9 @@ def test_delete_preference_set_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_preference_set(request) @@ -16814,26 +16886,28 @@ async def test_delete_preference_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_preference_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_preference_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_preference_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17193,22 +17267,23 @@ async def test_get_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17503,8 +17578,9 @@ def test_update_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_settings(request) @@ -17558,26 +17634,28 @@ async def test_update_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17891,8 +17969,9 @@ def test_create_report_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_report_config(request) @@ -17948,26 +18027,28 @@ async def test_create_report_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_report_config - ] = mock_object + ] = mock_rpc request = {} await client.create_report_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_report_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18360,22 +18441,23 @@ async def test_get_report_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_report_config - ] = mock_object + ] = mock_rpc request = {} await client.get_report_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_report_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18759,22 +18841,23 @@ async def test_list_report_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_report_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_report_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_report_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19294,8 +19377,9 @@ def test_delete_report_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_report_config(request) @@ -19351,26 +19435,28 @@ async def test_delete_report_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_report_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_report_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_report_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19675,8 +19761,9 @@ def test_create_report_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_report(request) @@ -19730,26 +19817,28 @@ async def test_create_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_report - ] = mock_object + ] = mock_rpc request = {} await client.create_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20125,22 +20214,23 @@ async def test_get_report_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_report - ] = mock_object + ] = mock_rpc request = {} await client.get_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20506,22 +20596,23 @@ async def test_list_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21012,8 +21103,9 @@ def test_delete_report_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_report(request) @@ -21067,26 +21159,28 @@ async def test_delete_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_report - ] = mock_object + ] = mock_rpc request = {} await client.delete_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard/gapic_version.py b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard/gapic_version.py index 42243510aeef..558c8aab67c5 100644 --- a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard/gapic_version.py +++ b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/gapic_version.py b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/gapic_version.py index 42243510aeef..558c8aab67c5 100644 --- a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/gapic_version.py +++ b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/async_client.py b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/async_client.py index 01598f0dc48f..264addc412dc 100644 --- a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/async_client.py +++ b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
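[Editor's note] The version edits in this and the surrounding packages follow the usual regeneration convention: gapic_version.py is reset to the "0.0.0" placeholder, the snippet metadata JSON to "0.1.0", and the trailing # {x-release-please-version} marker is what lets release tooling substitute the real version later. A hedged sketch of how the placeholder is read back; the module path comes from the diff above, while the note about client metadata is an assumption rather than something this patch shows:

    # Prints the placeholder until a release rewrites the marked line in
    # gapic_version.py; generated clients typically feed this string into
    # their client_info / user-agent metadata.
    from google.cloud.monitoring_dashboard import gapic_version as package_version

    print(package_version.__version__)  # "0.0.0" in the regenerated tree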
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DashboardsServiceClient).get_transport_class, type(DashboardsServiceClient) - ) + get_transport_class = DashboardsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/client.py b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/client.py index a4edc5f1ad2c..1150baf5c121 100644 --- a/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/client.py +++ b/packages/google-cloud-monitoring-dashboards/google/cloud/monitoring_dashboard_v1/services/dashboards_service/client.py @@ -687,7 +687,7 @@ def __init__( Type[DashboardsServiceTransport], Callable[..., DashboardsServiceTransport], ] = ( - type(self).get_transport_class(transport) + DashboardsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DashboardsServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring-dashboards/samples/generated_samples/snippet_metadata_google.monitoring.dashboard.v1.json b/packages/google-cloud-monitoring-dashboards/samples/generated_samples/snippet_metadata_google.monitoring.dashboard.v1.json index e3add5b1b112..c3c28f8fe357 100644 --- a/packages/google-cloud-monitoring-dashboards/samples/generated_samples/snippet_metadata_google.monitoring.dashboard.v1.json +++ b/packages/google-cloud-monitoring-dashboards/samples/generated_samples/snippet_metadata_google.monitoring.dashboard.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-monitoring-dashboards", - "version": "2.15.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py b/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py index 3c874a5d2409..1914d8a59af2 100644 --- a/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py +++ b/packages/google-cloud-monitoring-dashboards/tests/unit/gapic/monitoring_dashboard_v1/test_dashboards_service.py @@ -1361,22 +1361,23 @@ async def test_create_dashboard_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dashboard - ] = mock_object + ] = mock_rpc request = {} await client.create_dashboard(request) # Establish that the underlying gRPC stub method was called. 
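[Editor's note] The other change repeated across these async clients drops the functools import and replaces functools.partial(type(Client).get_transport_class, type(Client)) with a plain reference such as DashboardsServiceClient.get_transport_class. Both spellings reach the same lookup, which lives on the client's metaclass; the plain attribute access already yields a bound method, so the manual re-binding through partial was redundant. A stripped-down sketch under that reading, with illustrative names (ExampleClientMeta, ExampleGrpcTransport) rather than the generated classes:

    from collections import OrderedDict
    import functools


    class ExampleGrpcTransport:
        """Stand-in for a generated transport class."""


    class ExampleClientMeta(type):
        # Mirrors the generated pattern: the registry and the lookup live on
        # a metaclass, so they are reachable both from the client class and
        # from the metaclass itself.
        _transport_registry = OrderedDict(grpc=ExampleGrpcTransport)

        def get_transport_class(cls, label=None):
            if label:
                return cls._transport_registry[label]
            return next(iter(cls._transport_registry.values()))


    class ExampleClient(metaclass=ExampleClientMeta):
        pass


    # Old spelling: fetch the function off the metaclass and re-bind a class
    # as its first argument by hand.
    old_style = functools.partial(
        type(ExampleClient).get_transport_class, type(ExampleClient)
    )

    # New spelling from this regeneration: the attribute access already
    # yields a method bound to the client class, so the partial is unneeded.
    new_style = ExampleClient.get_transport_class

    assert old_style("grpc") is ExampleGrpcTransport
    assert new_style("grpc") is ExampleGrpcTransport

Both assertions pass, which supports the reading that the simplification preserves behaviour for string transport labels.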
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dashboard(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1742,22 +1743,23 @@ async def test_list_dashboards_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_dashboards - ] = mock_object + ] = mock_rpc request = {} await client.list_dashboards(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_dashboards(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2307,22 +2309,23 @@ async def test_get_dashboard_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dashboard - ] = mock_object + ] = mock_rpc request = {} await client.get_dashboard(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dashboard(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2666,22 +2669,23 @@ async def test_delete_dashboard_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dashboard - ] = mock_object + ] = mock_rpc request = {} await client.delete_dashboard(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_dashboard(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3026,22 +3030,23 @@ async def test_update_dashboard_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dashboard - ] = mock_object + ] = mock_rpc request = {} await client.update_dashboard(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dashboard(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope/gapic_version.py b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope/gapic_version.py index aedd949fadb2..558c8aab67c5 100644 --- a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope/gapic_version.py +++ b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/gapic_version.py b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/gapic_version.py index aedd949fadb2..558c8aab67c5 100644 --- a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/gapic_version.py +++ b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/async_client.py b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/async_client.py index c583a862294f..81e2179029b2 100644 --- a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/async_client.py +++ b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetricsScopesClient).get_transport_class, type(MetricsScopesClient) - ) + get_transport_class = MetricsScopesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/client.py b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/client.py index f7adfb9d1d10..2e673dc63dc7 100644 --- a/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/client.py +++ b/packages/google-cloud-monitoring-metrics-scopes/google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/client.py @@ -677,7 +677,7 @@ def __init__( transport_init: Union[ Type[MetricsScopesTransport], Callable[..., MetricsScopesTransport] ] = ( - type(self).get_transport_class(transport) + MetricsScopesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricsScopesTransport], transport) ) diff --git a/packages/google-cloud-monitoring-metrics-scopes/samples/generated_samples/snippet_metadata_google.monitoring.metricsscope.v1.json b/packages/google-cloud-monitoring-metrics-scopes/samples/generated_samples/snippet_metadata_google.monitoring.metricsscope.v1.json index 0989f49feb2b..0cc0413d2cdf 100644 --- a/packages/google-cloud-monitoring-metrics-scopes/samples/generated_samples/snippet_metadata_google.monitoring.metricsscope.v1.json +++ b/packages/google-cloud-monitoring-metrics-scopes/samples/generated_samples/snippet_metadata_google.monitoring.metricsscope.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-monitoring-metrics-scopes", - "version": "1.6.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-monitoring-metrics-scopes/tests/unit/gapic/monitoring_metrics_scope_v1/test_metrics_scopes.py b/packages/google-cloud-monitoring-metrics-scopes/tests/unit/gapic/monitoring_metrics_scope_v1/test_metrics_scopes.py index b4d764c4283b..4449d579aae2 100644 --- a/packages/google-cloud-monitoring-metrics-scopes/tests/unit/gapic/monitoring_metrics_scope_v1/test_metrics_scopes.py +++ b/packages/google-cloud-monitoring-metrics-scopes/tests/unit/gapic/monitoring_metrics_scope_v1/test_metrics_scopes.py @@ -1280,22 +1280,23 @@ async def test_get_metrics_scope_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metrics_scope - ] = mock_object + ] = mock_rpc request = {} await client.get_metrics_scope(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metrics_scope(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1666,22 +1667,23 @@ async def test_list_metrics_scopes_by_monitored_project_async_use_cached_wrapped ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metrics_scopes_by_monitored_project - ] = mock_object + ] = mock_rpc request = {} await client.list_metrics_scopes_by_monitored_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metrics_scopes_by_monitored_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1844,8 +1846,9 @@ def test_create_monitored_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_monitored_project(request) @@ -1901,26 +1904,28 @@ async def test_create_monitored_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_monitored_project - ] = mock_object + ] = mock_rpc request = {} await client.create_monitored_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_monitored_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2242,8 +2247,9 @@ def test_delete_monitored_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
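[Editor's note] The reworded comment in these operation-method tests ("Operation methods call wrapper_fn to build a cached client._transport.operations_client instance on first rpc call. Subsequent calls should use the cached wrapper") describes a lazy-caching shape: the first long-running-operation call builds an operations client, stores it on the transport, and later calls reuse it, which is what the wrapper_fn.call_count == 0 assertion after reset_mock() checks. A minimal sketch of that shape with hypothetical names (LazyTransport, OperationsClient), not the generated transports:

    class OperationsClient:
        """Stand-in for the operations client built on first use."""


    class LazyTransport:
        def __init__(self):
            self._operations_client = None

        @property
        def operations_client(self):
            if self._operations_client is None:
                # Built once, on first access, then reused.
                self._operations_client = OperationsClient()
            return self._operations_client


    transport = LazyTransport()
    first = transport.operations_client
    second = transport.operations_client
    assert first is second  # subsequent calls reuse the cached instance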
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_monitored_project(request) @@ -2299,26 +2305,28 @@ async def test_delete_monitored_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_monitored_project - ] = mock_object + ] = mock_rpc request = {} await client.delete_monitored_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_monitored_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py b/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py index d0118fc6ad86..558c8aab67c5 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.22.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py index d0118fc6ad86..558c8aab67c5 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.22.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py index 4773b086a208..f7526d46d63c 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,10 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlertPolicyServiceClient).get_transport_class, - type(AlertPolicyServiceClient), - ) + get_transport_class = AlertPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/client.py index 1dcd756a9dab..618a8d2f3d0b 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/alert_policy_service/client.py @@ -696,7 +696,7 @@ def __init__( Type[AlertPolicyServiceTransport], Callable[..., AlertPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + AlertPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlertPolicyServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/async_client.py index 7c65bc1fe162..4e5b6af214c5 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GroupServiceClient).get_transport_class, type(GroupServiceClient) - ) + get_transport_class = GroupServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/client.py index d767d8294df6..48ae58881570 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/group_service/client.py @@ -665,7 +665,7 @@ def __init__( transport_init: Union[ Type[GroupServiceTransport], Callable[..., GroupServiceTransport] ] = ( - type(self).get_transport_class(transport) + GroupServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GroupServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/async_client.py index 10f1f2308b57..31d1623340e2 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetricServiceClient).get_transport_class, type(MetricServiceClient) - ) + get_transport_class = MetricServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/client.py index 1f4939ba5ba1..07ccdba17fe4 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/metric_service/client.py @@ -703,7 +703,7 @@ def __init__( transport_init: Union[ Type[MetricServiceTransport], Callable[..., MetricServiceTransport] ] = ( - type(self).get_transport_class(transport) + MetricServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py index 8d0ae050ab45..e43dba38dcc6 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,10 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NotificationChannelServiceClient).get_transport_class, - type(NotificationChannelServiceClient), - ) + get_transport_class = NotificationChannelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/client.py index b3ea2fa69900..8cf585914006 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/notification_channel_service/client.py @@ -699,7 +699,7 @@ def __init__( Type[NotificationChannelServiceTransport], Callable[..., NotificationChannelServiceTransport], ] = ( - type(self).get_transport_class(transport) + NotificationChannelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NotificationChannelServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/async_client.py index 82b6c88f4c48..d76cde69760f 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/async_client.py @@ -14,7 +14,6 @@ # 
limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(QueryServiceClient).get_transport_class, type(QueryServiceClient) - ) + get_transport_class = QueryServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/client.py index b1093cc9a15b..eb712ba90150 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/query_service/client.py @@ -636,7 +636,7 @@ def __init__( transport_init: Union[ Type[QueryServiceTransport], Callable[..., QueryServiceTransport] ] = ( - type(self).get_transport_class(transport) + QueryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., QueryServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py index 0997f7c773e7..162fc01bc99a 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -210,10 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServiceMonitoringServiceClient).get_transport_class, - type(ServiceMonitoringServiceClient), - ) + get_transport_class = ServiceMonitoringServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/client.py index 1ea7deb2fdef..2ec6b313a2c7 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/service_monitoring_service/client.py @@ -695,7 +695,7 @@ def __init__( Type[ServiceMonitoringServiceTransport], Callable[..., ServiceMonitoringServiceTransport], ] = ( - type(self).get_transport_class(transport) + ServiceMonitoringServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceMonitoringServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/async_client.py index 2fb5080f0e09..e8afc0ecc5eb 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/async_client.py @@ -14,7 +14,6 @@ # 
limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SnoozeServiceClient).get_transport_class, type(SnoozeServiceClient) - ) + get_transport_class = SnoozeServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/client.py index 0ad2a23a3a68..08346b501dd9 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/snooze_service/client.py @@ -680,7 +680,7 @@ def __init__( transport_init: Union[ Type[SnoozeServiceTransport], Callable[..., SnoozeServiceTransport] ] = ( - type(self).get_transport_class(transport) + SnoozeServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SnoozeServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py index 7ea43a159148..984044837b9d 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UptimeCheckServiceClient).get_transport_class, - type(UptimeCheckServiceClient), - ) + get_transport_class = UptimeCheckServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/client.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/client.py index 9cf2415cbf9b..25711428b897 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/client.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/services/uptime_check_service/client.py @@ -695,7 +695,7 @@ def __init__( Type[UptimeCheckServiceTransport], Callable[..., UptimeCheckServiceTransport], ] = ( - type(self).get_transport_class(transport) + UptimeCheckServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UptimeCheckServiceTransport], transport) ) diff --git a/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json b/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json index f6e53cbc89e1..04888a5ff523 100644 --- a/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json +++ b/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-monitoring", - "version": "2.22.2" + 
"version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py index 6e999d557a15..feef56f33603 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py @@ -1322,22 +1322,23 @@ async def test_list_alert_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_alert_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_alert_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_alert_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1914,22 +1915,23 @@ async def test_get_alert_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_alert_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_alert_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_alert_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2306,22 +2308,23 @@ async def test_create_alert_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_alert_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_alert_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_alert_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2699,22 +2702,23 @@ async def test_delete_alert_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_alert_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_alert_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_alert_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3084,22 +3088,23 @@ async def test_update_alert_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_alert_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_alert_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_alert_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py index fe429edbc4dc..bdf418823868 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_group_service.py @@ -1253,22 +1253,23 @@ async def test_list_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1821,22 +1822,23 @@ async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_group - ] = mock_object + ] = mock_rpc request = {} await client.get_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2201,22 +2203,23 @@ async def test_create_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_group - ] = mock_object + ] = mock_rpc request = {} await client.create_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2587,22 +2590,23 @@ async def test_update_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_group - ] = mock_object + ] = mock_rpc request = {} await client.update_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2948,22 +2952,23 @@ async def test_delete_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3322,22 +3327,23 @@ async def test_list_group_members_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_group_members - ] = mock_object + ] = mock_rpc request = {} await client.list_group_members(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_group_members(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py index cbfef71a0ac4..e728971e12b8 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_metric_service.py @@ -1288,22 +1288,23 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_monitored_resource_descriptors - ] = mock_object + ] = mock_rpc request = {} await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_monitored_resource_descriptors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1898,22 +1899,23 @@ async def test_get_monitored_resource_descriptor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_monitored_resource_descriptor - ] = mock_object + ] = mock_rpc request = {} await client.get_monitored_resource_descriptor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_monitored_resource_descriptor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2298,22 +2300,23 @@ async def test_list_metric_descriptors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metric_descriptors - ] = mock_object + ] = mock_rpc request = {} await client.list_metric_descriptors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metric_descriptors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2912,22 +2915,23 @@ async def test_get_metric_descriptor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metric_descriptor - ] = mock_object + ] = mock_rpc request = {} await client.get_metric_descriptor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metric_descriptor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3340,22 +3344,23 @@ async def test_create_metric_descriptor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_metric_descriptor - ] = mock_object + ] = mock_rpc request = {} await client.create_metric_descriptor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_metric_descriptor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3747,22 +3752,23 @@ async def test_delete_metric_descriptor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_metric_descriptor - ] = mock_object + ] = mock_rpc request = {} await client.delete_metric_descriptor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_metric_descriptor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4124,22 +4130,23 @@ async def test_list_time_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_time_series - ] = mock_object + ] = mock_rpc request = {} await client.list_time_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_time_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4719,22 +4726,23 @@ async def test_create_time_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_time_series - ] = mock_object + ] = mock_rpc request = {} await client.create_time_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_time_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5108,22 +5116,23 @@ async def test_create_service_time_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_time_series - ] = mock_object + ] = mock_rpc request = {} await client.create_service_time_series(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service_time_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py index 771d0ec2cd3f..953f5039cd2c 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py @@ -1353,22 +1353,23 @@ async def test_list_notification_channel_descriptors_async_use_cached_wrapped_rp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notification_channel_descriptors - ] = mock_object + ] = mock_rpc request = {} await client.list_notification_channel_descriptors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notification_channel_descriptors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1970,22 +1971,23 @@ async def test_get_notification_channel_descriptor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification_channel_descriptor - ] = mock_object + ] = mock_rpc request = {} await client.get_notification_channel_descriptor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification_channel_descriptor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2377,22 +2379,23 @@ async def test_list_notification_channels_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notification_channels - ] = mock_object + ] = mock_rpc request = {} await client.list_notification_channels(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notification_channels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2986,22 +2989,23 @@ async def test_get_notification_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification_channel - ] = mock_object + ] = mock_rpc request = {} await client.get_notification_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3400,22 +3404,23 @@ async def test_create_notification_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_notification_channel - ] = mock_object + ] = mock_rpc request = {} await client.create_notification_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_notification_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3820,22 +3825,23 @@ async def test_update_notification_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_notification_channel - ] = mock_object + ] = mock_rpc request = {} await client.update_notification_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_notification_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4222,22 +4228,23 @@ async def test_delete_notification_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_notification_channel - ] = mock_object + ] = mock_rpc request = {} await client.delete_notification_channel(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_notification_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4614,22 +4621,23 @@ async def test_send_notification_channel_verification_code_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.send_notification_channel_verification_code - ] = mock_object + ] = mock_rpc request = {} await client.send_notification_channel_verification_code(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.send_notification_channel_verification_code(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5007,22 +5015,23 @@ async def test_get_notification_channel_verification_code_async_use_cached_wrapp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification_channel_verification_code - ] = mock_object + ] = mock_rpc request = {} await client.get_notification_channel_verification_code(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification_channel_verification_code(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5420,22 +5429,23 @@ async def test_verify_notification_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.verify_notification_channel - ] = mock_object + ] = mock_rpc request = {} await client.verify_notification_channel(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.verify_notification_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py index 5523f8fa907c..ea07dc7a3cf8 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_query_service.py @@ -1254,22 +1254,23 @@ async def test_query_time_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_time_series - ] = mock_object + ] = mock_rpc request = {} await client.query_time_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_time_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py index 4caa72d968cb..3667da6f4cf5 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py @@ -1324,22 +1324,23 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1699,22 +1700,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2065,22 +2067,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2622,22 +2625,23 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2977,22 +2981,23 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3354,22 +3359,23 @@ async def test_create_service_level_objective_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_level_objective - ] = mock_object + ] = mock_rpc request = {} await client.create_service_level_objective(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service_level_objective(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3763,22 +3769,23 @@ async def test_get_service_level_objective_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_level_objective - ] = mock_object + ] = mock_rpc request = {} await client.get_service_level_objective(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_level_objective(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4159,22 +4166,23 @@ async def test_list_service_level_objectives_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_level_objectives - ] = mock_object + ] = mock_rpc request = {} await client.list_service_level_objectives(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_level_objectives(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4754,22 +4762,23 @@ async def test_update_service_level_objective_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service_level_objective - ] = mock_object + ] = mock_rpc request = {} await client.update_service_level_objective(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_service_level_objective(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5139,22 +5148,23 @@ async def test_delete_service_level_objective_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service_level_objective - ] = mock_object + ] = mock_rpc request = {} await client.delete_service_level_objective(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_service_level_objective(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py index 69f67db6f389..fd3be31706d2 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_snooze_service.py @@ -1267,22 +1267,23 @@ async def test_create_snooze_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_snooze - ] = mock_object + ] = mock_rpc request = {} await client.create_snooze(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_snooze(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_list_snoozes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_snoozes - ] = mock_object + ] = mock_rpc request = {} await client.list_snoozes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_snoozes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2202,22 +2204,23 @@ async def test_get_snooze_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_snooze - ] = mock_object + ] = mock_rpc request = {} await client.get_snooze(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_snooze(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2563,22 +2566,23 @@ async def test_update_snooze_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_snooze - ] = mock_object + ] = mock_rpc request = {} await client.update_snooze(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_snooze(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py index 1a5ebda12b36..c19806cf45c5 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py @@ -1313,22 +1313,23 @@ async def test_list_uptime_check_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_uptime_check_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_uptime_check_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_uptime_check_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1921,22 +1922,23 @@ async def test_get_uptime_check_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_uptime_check_config - ] = mock_object + ] = mock_rpc request = {} await client.get_uptime_check_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_uptime_check_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2333,22 +2335,23 @@ async def test_create_uptime_check_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_uptime_check_config - ] = mock_object + ] = mock_rpc request = {} await client.create_uptime_check_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_uptime_check_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2751,22 +2754,23 @@ async def test_update_uptime_check_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_uptime_check_config - ] = mock_object + ] = mock_rpc request = {} await client.update_uptime_check_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_uptime_check_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3142,22 +3146,23 @@ async def test_delete_uptime_check_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_uptime_check_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_uptime_check_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_uptime_check_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3521,22 +3526,23 @@ async def test_list_uptime_check_ips_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_uptime_check_ips - ] = mock_object + ] = mock_rpc request = {} await client.list_uptime_check_ips(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_uptime_check_ips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py index fb3463bbb3c2..558c8aab67c5 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py index fb3463bbb3c2..558c8aab67c5 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py index 4f00527ddadc..a1bd9a72ca72 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NetAppClient).get_transport_class, type(NetAppClient) - ) + get_transport_class = NetAppClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py index b1f7a0fadd6f..8d8dc7fd2ef6 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py @@ -880,7 +880,7 @@ def __init__( transport_init: Union[ Type[NetAppTransport], Callable[..., NetAppTransport] ] = ( - type(self).get_transport_class(transport) + NetAppClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetAppTransport], transport) ) diff --git a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json index 6c152bc7c312..3edfdb18757c 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json +++ b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-netapp", - "version": "0.3.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index f5361c806474..0465f736de27 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -1265,22 +1265,23 @@ async def test_list_storage_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_storage_pools - ] = mock_object + ] = mock_rpc request = {} await 
client.list_storage_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_storage_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1799,8 +1800,9 @@ def test_create_storage_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_storage_pool(request) @@ -1856,26 +1858,28 @@ async def test_create_storage_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_storage_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_storage_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_storage_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2302,22 +2306,23 @@ async def test_get_storage_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_storage_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_storage_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_storage_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2648,8 +2653,9 @@ def test_update_storage_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_storage_pool(request) @@ -2705,26 +2711,28 @@ async def test_update_storage_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_storage_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_storage_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_storage_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3045,8 +3053,9 @@ def test_delete_storage_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_storage_pool(request) @@ -3102,26 +3111,28 @@ async def test_delete_storage_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_storage_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_storage_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_storage_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3486,22 +3497,23 @@ async def test_list_volumes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_volumes - ] = mock_object + ] = mock_rpc request = {} await client.list_volumes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_volumes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4125,22 +4137,23 @@ async def test_get_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_volume - ] = mock_object + ] = mock_rpc request = {} await client.get_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4485,8 +4498,9 @@ def test_create_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_volume(request) @@ -4540,26 +4554,28 @@ async def test_create_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_volume - ] = mock_object + ] = mock_rpc request = {} await client.create_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4865,8 +4881,9 @@ def test_update_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_volume(request) @@ -4920,26 +4937,28 @@ async def test_update_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_volume - ] = mock_object + ] = mock_rpc request = {} await client.update_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5239,8 +5258,9 @@ def test_delete_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_volume(request) @@ -5294,26 +5314,28 @@ async def test_delete_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_volume - ] = mock_object + ] = mock_rpc request = {} await client.delete_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5605,8 +5627,9 @@ def test_revert_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.revert_volume(request) @@ -5660,26 +5683,28 @@ async def test_revert_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revert_volume - ] = mock_object + ] = mock_rpc request = {} await client.revert_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.revert_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5952,22 +5977,23 @@ async def test_list_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6524,22 +6550,23 @@ async def test_get_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6838,8 +6865,9 @@ def test_create_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_snapshot(request) @@ -6893,26 +6921,28 @@ async def test_create_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7222,8 +7252,9 @@ def test_delete_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_snapshot(request) @@ -7277,26 +7308,28 @@ async def test_delete_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7582,8 +7615,9 @@ def test_update_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_snapshot(request) @@ -7637,26 +7671,28 @@ async def test_update_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8034,22 +8070,23 @@ async def test_list_active_directories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_active_directories - ] = mock_object + ] = mock_rpc request = {} await client.list_active_directories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_active_directories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8682,22 +8719,23 @@ async def test_get_active_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_active_directory - ] = mock_object + ] = mock_rpc request = {} await client.get_active_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9052,8 +9090,9 @@ def test_create_active_directory_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_active_directory(request) @@ -9109,26 +9148,28 @@ async def test_create_active_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_active_directory - ] = mock_object + ] = mock_rpc request = {} await client.create_active_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9456,8 +9497,9 @@ def test_update_active_directory_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_active_directory(request) @@ -9513,26 +9555,28 @@ async def test_update_active_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_active_directory - ] = mock_object + ] = mock_rpc request = {} await client.update_active_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9854,8 +9898,9 @@ def test_delete_active_directory_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_active_directory(request) @@ -9911,26 +9956,28 @@ async def test_delete_active_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_active_directory - ] = mock_object + ] = mock_rpc request = {} await client.delete_active_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_active_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10298,22 +10345,23 @@ async def test_list_kms_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_kms_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_kms_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_kms_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10812,8 +10860,9 @@ def test_create_kms_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_kms_config(request) @@ -10869,26 +10918,28 @@ async def test_create_kms_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_kms_config - ] = mock_object + ] = mock_rpc request = {} await client.create_kms_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11282,22 +11333,23 @@ async def test_get_kms_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_kms_config - ] = mock_object + ] = mock_rpc request = {} await client.get_kms_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11602,8 +11654,9 @@ def test_update_kms_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_kms_config(request) @@ -11659,26 +11712,28 @@ async def test_update_kms_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_kms_config - ] = mock_object + ] = mock_rpc request = {} await client.update_kms_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11988,8 +12043,9 @@ def test_encrypt_volumes_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.encrypt_volumes(request) @@ -12043,26 +12099,28 @@ async def test_encrypt_volumes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.encrypt_volumes - ] = mock_object + ] = mock_rpc request = {} await client.encrypt_volumes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.encrypt_volumes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12342,22 +12400,23 @@ async def test_verify_kms_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.verify_kms_config - ] = mock_object + ] = mock_rpc request = {} await client.verify_kms_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.verify_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12586,8 +12645,9 @@ def test_delete_kms_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_kms_config(request) @@ -12643,26 +12703,28 @@ async def test_delete_kms_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_kms_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_kms_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_kms_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13037,22 +13099,23 @@ async def test_list_replications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_replications - ] = mock_object + ] = mock_rpc request = {} await client.list_replications(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_replications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13645,22 +13708,23 @@ async def test_get_replication_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_replication - ] = mock_object + ] = mock_rpc request = {} await client.get_replication(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13986,8 +14050,9 @@ def test_create_replication_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_replication(request) @@ -14043,26 +14108,28 @@ async def test_create_replication_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_replication - ] = mock_object + ] = mock_rpc request = {} await client.create_replication(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14393,8 +14460,9 @@ def test_delete_replication_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_replication(request) @@ -14450,26 +14518,28 @@ async def test_delete_replication_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_replication - ] = mock_object + ] = mock_rpc request = {} await client.delete_replication(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14775,8 +14845,9 @@ def test_update_replication_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_replication(request) @@ -14832,26 +14903,28 @@ async def test_update_replication_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_replication - ] = mock_object + ] = mock_rpc request = {} await client.update_replication(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15164,8 +15237,9 @@ def test_stop_replication_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_replication(request) @@ -15219,26 +15293,28 @@ async def test_stop_replication_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_replication - ] = mock_object + ] = mock_rpc request = {} await client.stop_replication(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15456,8 +15532,9 @@ def test_resume_replication_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resume_replication(request) @@ -15513,26 +15590,28 @@ async def test_resume_replication_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_replication - ] = mock_object + ] = mock_rpc request = {} await client.resume_replication(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resume_replication(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15757,8 +15836,9 @@ def test_reverse_replication_direction_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reverse_replication_direction(request) @@ -15814,26 +15894,28 @@ async def test_reverse_replication_direction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reverse_replication_direction - ] = mock_object + ] = mock_rpc request = {} await client.reverse_replication_direction(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reverse_replication_direction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16060,8 +16142,9 @@ def test_create_backup_vault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup_vault(request) @@ -16117,26 +16200,28 @@ async def test_create_backup_vault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup_vault - ] = mock_object + ] = mock_rpc request = {} await client.create_backup_vault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16521,22 +16606,23 @@ async def test_get_backup_vault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup_vault - ] = mock_object + ] = mock_rpc request = {} await client.get_backup_vault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16910,22 +16996,23 @@ async def test_list_backup_vaults_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backup_vaults - ] = mock_object + ] = mock_rpc request = {} await client.list_backup_vaults(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backup_vaults(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17438,8 +17525,9 @@ def test_update_backup_vault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup_vault(request) @@ -17495,26 +17583,28 @@ async def test_update_backup_vault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup_vault - ] = mock_object + ] = mock_rpc request = {} await client.update_backup_vault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17835,8 +17925,9 @@ def test_delete_backup_vault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup_vault(request) @@ -17892,26 +17983,28 @@ async def test_delete_backup_vault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup_vault - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup_vault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18213,8 +18306,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -18268,26 +18362,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18672,22 +18768,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19055,22 +19152,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19559,8 +19657,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -19614,26 +19713,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19919,8 +20020,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -19974,26 +20076,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20305,8 +20409,9 @@ def test_create_backup_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup_policy(request) @@ -20362,26 +20467,28 @@ async def test_create_backup_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_backup_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20789,22 +20896,23 @@ async def test_get_backup_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_backup_policy(request) # Establish that the underlying gRPC stub method was called. 
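
[Reviewer note] The recurring change in the hunks above and below renames the async stand-in from mock_object to mock_rpc and pins mock_rpc.return_value to a plain mock.Mock(). The short sketch below is not part of the patch; it only illustrates, with hypothetical names, the unittest.mock behaviour this relies on: awaiting an AsyncMock call yields whatever return_value was set, so pinning it keeps the awaited result a deterministic synchronous Mock rather than the default auto-created child mock.

import asyncio
from unittest import mock


async def demo() -> None:
    # Hypothetical stand-in for a wrapped async RPC taken from _wrapped_methods.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaited result is this plain Mock

    result = await mock_rpc({})
    assert result is mock_rpc.return_value
    assert mock_rpc.call_count == 1

    await mock_rpc({})
    assert mock_rpc.call_count == 2


asyncio.run(demo())

This runs as-is on Python 3.8+, where AsyncMock was introduced.
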
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21198,22 +21306,23 @@ async def test_list_backup_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backup_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_backup_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backup_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21727,8 +21836,9 @@ def test_update_backup_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup_policy(request) @@ -21784,26 +21894,28 @@ async def test_update_backup_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_backup_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22124,8 +22236,9 @@ def test_delete_backup_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup_policy(request) @@ -22181,26 +22294,28 @@ async def test_delete_backup_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity/gapic_version.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity/gapic_version.py index c5d1f9b11808..558c8aab67c5 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity/gapic_version.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/gapic_version.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/gapic_version.py index c5d1f9b11808..558c8aab67c5 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/gapic_version.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/async_client.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/async_client.py index 10db317088ee..29e05a5d10e0 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/async_client.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(HubServiceClient).get_transport_class, type(HubServiceClient) - ) + get_transport_class = HubServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/client.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/client.py index 57b1a0ab74cd..7ea92a88b79e 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/client.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/hub_service/client.py @@ -841,7 +841,7 @@ def __init__( transport_init: Union[ Type[HubServiceTransport], Callable[..., HubServiceTransport] ] = ( - type(self).get_transport_class(transport) + HubServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., HubServiceTransport], transport) ) diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/async_client.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/async_client.py index 75a2cf8878b9..011ca96d5b65 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/async_client.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
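
[Reviewer note] On the get_transport_class simplification above: a hook defined on the client's metaclass is already bound when it is looked up through the class itself, so the functools.partial(type(Cls).get_transport_class, type(Cls)) construction it replaces was redundant. The toy below assumes, as the generated clients appear to, that the method lives on a metaclass and reads a transport registry stored there; it is an illustration of the equivalence, not the generated code.

import functools


class ClientMeta(type):
    """Toy metaclass standing in for the generated client metaclass."""

    _transport_registry = {"grpc": "GrpcTransport", "rest": "RestTransport"}

    def get_transport_class(cls, label: str = "grpc") -> str:
        # The registry lookup resolves whether cls is the client class or its
        # metaclass, because the registry lives on the metaclass.
        return cls._transport_registry[label]


class Client(metaclass=ClientMeta):
    pass


# Old construction: fetch the function from the metaclass and re-bind by hand.
old_style = functools.partial(type(Client).get_transport_class, type(Client))
# New construction: plain attribute access on the class is already bound.
new_style = Client.get_transport_class

assert old_style("rest") == new_style("rest") == "RestTransport"
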
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,10 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyBasedRoutingServiceClient).get_transport_class, - type(PolicyBasedRoutingServiceClient), - ) + get_transport_class = PolicyBasedRoutingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/client.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/client.py index d9a2561626ff..99cf3fb37757 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/client.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1/services/policy_based_routing_service/client.py @@ -698,7 +698,7 @@ def __init__( Type[PolicyBasedRoutingServiceTransport], Callable[..., PolicyBasedRoutingServiceTransport], ] = ( - type(self).get_transport_class(transport) + PolicyBasedRoutingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PolicyBasedRoutingServiceTransport], transport) ) diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/gapic_version.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/gapic_version.py index c5d1f9b11808..558c8aab67c5 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/gapic_version.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/async_client.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/async_client.py index b87d1fe17194..7d7a93a4ce56 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/async_client.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(HubServiceClient).get_transport_class, type(HubServiceClient) - ) + get_transport_class = HubServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/client.py b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/client.py index e4e22acaee83..56b910321e85 100644 --- a/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/client.py +++ b/packages/google-cloud-network-connectivity/google/cloud/networkconnectivity_v1alpha1/services/hub_service/client.py @@ -750,7 +750,7 @@ def __init__( transport_init: Union[ Type[HubServiceTransport], Callable[..., HubServiceTransport] ] = ( - type(self).get_transport_class(transport) + HubServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., HubServiceTransport], transport) ) diff --git a/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1.json b/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1.json index a46810aa2d57..62ba18e54b67 100644 --- a/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1.json +++ b/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-network-connectivity", - "version": "2.4.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1alpha1.json b/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1alpha1.json index 5a00c0c2ca27..7d9f482d0d17 100644 --- a/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1alpha1.json +++ b/packages/google-cloud-network-connectivity/samples/generated_samples/snippet_metadata_google.cloud.networkconnectivity.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-network-connectivity", - "version": "2.4.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py index 839858800503..32631ab74106 100644 --- a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py +++ b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_hub_service.py @@ -1243,22 +1243,23 @@ async def test_list_hubs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hubs - ] = mock_object + ] = 
mock_rpc request = {} await client.list_hubs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hubs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1813,22 +1814,23 @@ async def test_get_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_hub - ] = mock_object + ] = mock_rpc request = {} await client.get_hub(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2129,8 +2131,9 @@ def test_create_hub_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_hub(request) @@ -2182,26 +2185,28 @@ async def test_create_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_hub - ] = mock_object + ] = mock_rpc request = {} await client.create_hub(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2511,8 +2516,9 @@ def test_update_hub_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_hub(request) @@ -2564,26 +2570,28 @@ async def test_update_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_hub - ] = mock_object + ] = mock_rpc request = {} await client.update_hub(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2885,8 +2893,9 @@ def test_delete_hub_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_hub(request) @@ -2938,26 +2947,28 @@ async def test_delete_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_hub - ] = mock_object + ] = mock_rpc request = {} await client.delete_hub(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3312,22 +3323,23 @@ async def test_list_hub_spokes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hub_spokes - ] = mock_object + ] = mock_rpc request = {} await client.list_hub_spokes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hub_spokes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3881,22 +3893,23 @@ async def test_list_spokes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_spokes - ] = mock_object + ] = mock_rpc request = {} await client.list_spokes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_spokes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4457,22 +4470,23 @@ async def test_get_spoke_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_spoke - ] = mock_object + ] = mock_rpc request = {} await client.get_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4777,8 +4791,9 @@ def test_create_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_spoke(request) @@ -4832,26 +4847,28 @@ async def test_create_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_spoke - ] = mock_object + ] = mock_rpc request = {} await client.create_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5161,8 +5178,9 @@ def test_update_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
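
[Reviewer note] The reworded comment ("Operation methods call wrapper_fn to build a cached client._transport.operations_client instance on first rpc call. Subsequent calls should use the cached wrapper") describes a lazy build-once-then-reuse pattern. The fragment below is a minimal illustration of that idiom with placeholder types, not the transport's actual implementation.

class Transport:
    """Minimal illustration of the lazy caching the new comment describes."""

    def __init__(self) -> None:
        self._operations_client = None

    @property
    def operations_client(self):
        # Built on first access (i.e. on the first operation RPC), then reused.
        if self._operations_client is None:
            self._operations_client = object()  # stand-in for the real client
        return self._operations_client


transport = Transport()
first = transport.operations_client
second = transport.operations_client
assert first is second  # subsequent calls reuse the cached instance
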
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_spoke(request) @@ -5216,26 +5234,28 @@ async def test_update_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_spoke - ] = mock_object + ] = mock_rpc request = {} await client.update_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5543,8 +5563,9 @@ def test_reject_hub_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reject_hub_spoke(request) @@ -5598,26 +5619,28 @@ async def test_reject_hub_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reject_hub_spoke - ] = mock_object + ] = mock_rpc request = {} await client.reject_hub_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reject_hub_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5923,8 +5946,9 @@ def test_accept_hub_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.accept_hub_spoke(request) @@ -5978,26 +6002,28 @@ async def test_accept_hub_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.accept_hub_spoke - ] = mock_object + ] = mock_rpc request = {} await client.accept_hub_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.accept_hub_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6299,8 +6325,9 @@ def test_delete_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_spoke(request) @@ -6354,26 +6381,28 @@ async def test_delete_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_spoke - ] = mock_object + ] = mock_rpc request = {} await client.delete_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6728,22 +6757,23 @@ async def test_get_route_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_route_table - ] = mock_object + ] = mock_rpc request = {} await client.get_route_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_route_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7113,22 +7143,23 @@ async def test_get_route_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_route - ] = mock_object + ] = mock_rpc request = {} await client.get_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7496,22 +7527,23 @@ async def test_list_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8075,22 +8107,23 @@ async def test_list_route_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_route_tables - ] = mock_object + ] = mock_rpc request = {} await client.list_route_tables(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_route_tables(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8660,22 +8693,23 @@ async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_group - ] = mock_object + ] = mock_rpc request = {} await client.get_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9035,22 +9069,23 @@ async def test_list_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py index 6d05d32de51b..cecfe9c66c4a 100644 --- a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py +++ b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1/test_policy_based_routing_service.py @@ -1353,22 +1353,23 @@ async def test_list_policy_based_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_policy_based_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_policy_based_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_policy_based_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1967,22 +1968,23 @@ async def test_get_policy_based_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy_based_route - ] = mock_object + ] = mock_rpc request = {} await client.get_policy_based_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy_based_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2311,8 +2313,9 @@ def test_create_policy_based_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_policy_based_route(request) @@ -2368,26 +2371,28 @@ async def test_create_policy_based_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_policy_based_route - ] = mock_object + ] = mock_rpc request = {} await client.create_policy_based_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_policy_based_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2745,8 +2750,9 @@ def test_delete_policy_based_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_policy_based_route(request) @@ -2802,26 +2808,28 @@ async def test_delete_policy_based_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_policy_based_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_policy_based_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_policy_based_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py index 5325ee676dc6..82fbaed53bda 100644 --- a/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py +++ b/packages/google-cloud-network-connectivity/tests/unit/gapic/networkconnectivity_v1alpha1/test_hub_service.py @@ -1239,22 +1239,23 @@ async def test_list_hubs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hubs - ] = mock_object + ] = mock_rpc request = {} await client.list_hubs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hubs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1809,22 +1810,23 @@ async def test_get_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_hub - ] = mock_object + ] = mock_rpc request = {} await client.get_hub(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2125,8 +2127,9 @@ def test_create_hub_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_hub(request) @@ -2178,26 +2181,28 @@ async def test_create_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_hub - ] = mock_object + ] = mock_rpc request = {} await client.create_hub(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2507,8 +2512,9 @@ def test_update_hub_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_hub(request) @@ -2560,26 +2566,28 @@ async def test_update_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_hub - ] = mock_object + ] = mock_rpc request = {} await client.update_hub(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2881,8 +2889,9 @@ def test_delete_hub_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_hub(request) @@ -2934,26 +2943,28 @@ async def test_delete_hub_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_hub - ] = mock_object + ] = mock_rpc request = {} await client.delete_hub(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_hub(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3308,22 +3319,23 @@ async def test_list_spokes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_spokes - ] = mock_object + ] = mock_rpc request = {} await client.list_spokes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_spokes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3888,22 +3900,23 @@ async def test_get_spoke_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_spoke - ] = mock_object + ] = mock_rpc request = {} await client.get_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4212,8 +4225,9 @@ def test_create_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_spoke(request) @@ -4267,26 +4281,28 @@ async def test_create_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_spoke - ] = mock_object + ] = mock_rpc request = {} await client.create_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4596,8 +4612,9 @@ def test_update_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_spoke(request) @@ -4651,26 +4668,28 @@ async def test_update_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_spoke - ] = mock_object + ] = mock_rpc request = {} await client.update_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4972,8 +4991,9 @@ def test_delete_spoke_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_spoke(request) @@ -5027,26 +5047,28 @@ async def test_delete_spoke_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_spoke - ] = mock_object + ] = mock_rpc request = {} await client.delete_spoke(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_spoke(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-management/google/cloud/network_management/gapic_version.py b/packages/google-cloud-network-management/google/cloud/network_management/gapic_version.py index 8099b154e9b6..558c8aab67c5 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management/gapic_version.py +++ b/packages/google-cloud-network-management/google/cloud/network_management/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.18.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_version.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_version.py index 8099b154e9b6..558c8aab67c5 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_version.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/async_client.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/async_client.py index 2f7b207d3980..61e90df884e8 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/async_client.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,10 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ReachabilityServiceClient).get_transport_class, - type(ReachabilityServiceClient), - ) + get_transport_class = ReachabilityServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/client.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/client.py index bf1e7e58f9c4..46a2af9cac75 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/client.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/client.py @@ -683,7 +683,7 @@ def __init__( Type[ReachabilityServiceTransport], Callable[..., ReachabilityServiceTransport], ] = ( - type(self).get_transport_class(transport) + ReachabilityServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReachabilityServiceTransport], transport) ) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py index 4f46fe720b10..b500fa10e786 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py @@ -221,7 +221,7 @@ class Endpoint(proto.Message): provide forwarding information in the control plane. 
Format: - projects/{project}/global/forwardingRules/{id} + projects/{project}/global/forwardingRules/{id} or projects/{project}/regions/{region}/forwardingRules/{id} forwarding_rule_target (google.cloud.network_management_v1.types.Endpoint.ForwardingRuleTarget): diff --git a/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json b/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json index c60bbedeb52e..21f45711959c 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json +++ b/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-network-management", - "version": "1.18.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py index 2423c09c5aee..67d82c171f92 100644 --- a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py +++ b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py @@ -1394,22 +1394,23 @@ async def test_list_connectivity_tests_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connectivity_tests - ] = mock_object + ] = mock_rpc request = {} await client.list_connectivity_tests(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connectivity_tests(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2001,22 +2002,23 @@ async def test_get_connectivity_test_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connectivity_test - ] = mock_object + ] = mock_rpc request = {} await client.get_connectivity_test(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connectivity_test(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,8 +2345,9 @@ def test_create_connectivity_test_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connectivity_test(request) @@ -2400,26 +2403,28 @@ async def test_create_connectivity_test_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connectivity_test - ] = mock_object + ] = mock_rpc request = {} await client.create_connectivity_test(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connectivity_test(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2747,8 +2752,9 @@ def test_update_connectivity_test_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connectivity_test(request) @@ -2804,26 +2810,28 @@ async def test_update_connectivity_test_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connectivity_test - ] = mock_object + ] = mock_rpc request = {} await client.update_connectivity_test(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connectivity_test(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3145,8 +3153,9 @@ def test_rerun_connectivity_test_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rerun_connectivity_test(request) @@ -3202,26 +3211,28 @@ async def test_rerun_connectivity_test_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rerun_connectivity_test - ] = mock_object + ] = mock_rpc request = {} await client.rerun_connectivity_test(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rerun_connectivity_test(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3447,8 +3458,9 @@ def test_delete_connectivity_test_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connectivity_test(request) @@ -3504,26 +3516,28 @@ async def test_delete_connectivity_test_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connectivity_test - ] = mock_object + ] = mock_rpc request = {} await client.delete_connectivity_test(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connectivity_test(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-security/google/cloud/network_security/gapic_version.py b/packages/google-cloud-network-security/google/cloud/network_security/gapic_version.py index bc3866048d8a..558c8aab67c5 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security/gapic_version.py +++ b/packages/google-cloud-network-security/google/cloud/network_security/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.9.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1/gapic_version.py b/packages/google-cloud-network-security/google/cloud/network_security_v1/gapic_version.py index bc3866048d8a..558c8aab67c5 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1/gapic_version.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/async_client.py index 81201de2a85a..9d2803bede8a 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/async_client.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,9 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NetworkSecurityClient).get_transport_class, type(NetworkSecurityClient) - ) + get_transport_class = NetworkSecurityClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/client.py index 78b3b842769a..e0cf204cc68e 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/client.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1/services/network_security/client.py @@ -732,7 +732,7 @@ def __init__( transport_init: Union[ Type[NetworkSecurityTransport], Callable[..., NetworkSecurityTransport] ] = ( - type(self).get_transport_class(transport) + NetworkSecurityClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkSecurityTransport], transport) ) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_version.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_version.py index bc3866048d8a..558c8aab67c5 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_version.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.9.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/async_client.py index 3dcdb0cbcd19..53610f814c17 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/async_client.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,9 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NetworkSecurityClient).get_transport_class, type(NetworkSecurityClient) - ) + get_transport_class = NetworkSecurityClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/client.py index 459529944f9d..3c65c7d9e122 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/client.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/network_security/client.py @@ -730,7 +730,7 @@ def __init__( transport_init: Union[ Type[NetworkSecurityTransport], Callable[..., NetworkSecurityTransport] ] = ( - type(self).get_transport_class(transport) + NetworkSecurityClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkSecurityTransport], transport) ) diff --git a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1.json b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1.json index 5827aa32254a..0bc5339e0c8a 100644 --- a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1.json +++ b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-network-security", - "version": "0.9.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json index 861da31eae88..fd7e421ca0d6 100644 --- a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json +++ b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-network-security", - "version": "0.9.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py 
b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py index 56a35bf33cf8..9f2461e71701 100644 --- a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1/test_network_security.py @@ -1350,22 +1350,23 @@ async def test_list_authorization_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_authorization_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_authorization_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1952,22 +1953,23 @@ async def test_get_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2288,8 +2290,9 @@ def test_create_authorization_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_authorization_policy(request) @@ -2345,26 +2348,28 @@ async def test_create_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2700,8 +2705,9 @@ def test_update_authorization_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_authorization_policy(request) @@ -2757,26 +2763,28 @@ async def test_update_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3106,8 +3114,9 @@ def test_delete_authorization_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_authorization_policy(request) @@ -3163,26 +3172,28 @@ async def test_delete_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3554,22 +3565,23 @@ async def test_list_server_tls_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_server_tls_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_server_tls_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_server_tls_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4164,23 @@ async def test_get_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4488,8 +4501,9 @@ def test_create_server_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_server_tls_policy(request) @@ -4545,26 +4559,28 @@ async def test_create_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4892,8 +4908,9 @@ def test_update_server_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_server_tls_policy(request) @@ -4949,26 +4966,28 @@ async def test_update_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5290,8 +5309,9 @@ def test_delete_server_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_server_tls_policy(request) @@ -5347,26 +5367,28 @@ async def test_delete_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5738,22 +5760,23 @@ async def test_list_client_tls_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_client_tls_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_client_tls_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_client_tls_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6336,22 +6359,23 @@ async def test_get_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6672,8 +6696,9 @@ def test_create_client_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_client_tls_policy(request) @@ -6729,26 +6754,28 @@ async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7076,8 +7103,9 @@ def test_update_client_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_client_tls_policy(request) @@ -7133,26 +7161,28 @@ async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7474,8 +7504,9 @@ def test_delete_client_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_client_tls_policy(request) @@ -7531,26 +7562,28 @@ async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py index 65468795764f..a7d2777e702a 100644 --- a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_network_security.py @@ -1317,22 +1317,23 @@ async def test_list_authorization_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_authorization_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_authorization_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1919,22 +1920,23 @@ async def test_get_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2255,8 +2257,9 @@ def test_create_authorization_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_authorization_policy(request) @@ -2312,26 +2315,28 @@ async def test_create_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. 
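The async hunks above all make the same two mechanical changes: the mock that replaces the cached wrapped RPC is renamed from mock_object to mock_rpc, matching the sync tests, and its return_value is pinned to a plain mock.Mock() so that awaiting the stub yields a synchronous object. A minimal, self-contained sketch of that pattern, using a hypothetical FakeAsyncClient rather than the generated clients:

import asyncio
from unittest import mock


class FakeAsyncClient:
    """Hypothetical stand-in for a generated async GAPIC client."""

    def __init__(self):
        # Generated clients cache one wrapped callable per RPC in a dict
        # like transport._wrapped_methods; a single entry is enough here.
        self._wrapped_methods = {"get_policy": mock.AsyncMock()}

    async def get_policy(self, request):
        return await self._wrapped_methods["get_policy"](request)


async def main():
    client = FakeAsyncClient()

    # Replace the cached wrapped function with a mock, as the tests do.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaited result is a plain Mock
    client._wrapped_methods["get_policy"] = mock_rpc

    await client.get_policy({})
    assert mock_rpc.call_count == 1

    # The second call goes through the same cached wrapper.
    await client.get_policy({})
    assert mock_rpc.call_count == 2


asyncio.run(main())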
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2667,8 +2672,9 @@ def test_update_authorization_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_authorization_policy(request) @@ -2724,26 +2730,28 @@ async def test_update_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3073,8 +3081,9 @@ def test_delete_authorization_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_authorization_policy(request) @@ -3130,26 +3139,28 @@ async def test_delete_authorization_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_authorization_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3521,22 +3532,23 @@ async def test_list_server_tls_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_server_tls_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_server_tls_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_server_tls_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4119,22 +4131,23 @@ async def test_get_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4455,8 +4468,9 @@ def test_create_server_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_server_tls_policy(request) @@ -4512,26 +4526,28 @@ async def test_create_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4859,8 +4875,9 @@ def test_update_server_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_server_tls_policy(request) @@ -4916,26 +4933,28 @@ async def test_update_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5257,8 +5276,9 @@ def test_delete_server_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_server_tls_policy(request) @@ -5314,26 +5334,28 @@ async def test_delete_server_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_server_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_server_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_server_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5705,22 +5727,23 @@ async def test_list_client_tls_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_client_tls_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_client_tls_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_client_tls_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6303,22 +6326,23 @@ async def test_get_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6639,8 +6663,9 @@ def test_create_client_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_client_tls_policy(request) @@ -6696,26 +6721,28 @@ async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7043,8 +7070,9 @@ def test_update_client_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_client_tls_policy(request) @@ -7100,26 +7128,28 @@ async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7441,8 +7471,9 @@ def test_delete_client_tls_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_client_tls_policy(request) @@ -7498,26 +7529,28 @@ async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_client_tls_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_client_tls_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_client_tls_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py b/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py index 0f3dcb10f73a..558c8aab67c5 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py +++ b/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py index 0f3dcb10f73a..558c8aab67c5 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py index fa65cbdf9b68..55d7ff9fcfe8 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DepServiceClient).get_transport_class, type(DepServiceClient) - ) + get_transport_class = DepServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py index f2557c4f7905..20f240be542e 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py @@ -688,7 +688,7 @@ def __init__( transport_init: Union[ Type[DepServiceTransport], Callable[..., DepServiceTransport] ] = ( - type(self).get_transport_class(transport) + DepServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DepServiceTransport], transport) ) diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py index 4987b65ca800..a31e7039c505 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -253,9 +252,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NetworkServicesClient).get_transport_class, type(NetworkServicesClient) - ) + get_transport_class = NetworkServicesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py index ae53f4122aa6..2d432b5e62c8 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/network_services/client.py @@ -934,7 +934,7 @@ def __init__( transport_init: Union[ Type[NetworkServicesTransport], Callable[..., NetworkServicesTransport] ] = ( - type(self).get_transport_class(transport) + NetworkServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkServicesTransport], transport) ) diff --git a/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json index a4334bd232b5..5efe8fa4280b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json +++ 
b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-network-services", - "version": "0.5.14" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py index 30d32a48ca1f..b1a360c0716b 100644 --- a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py +++ b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py @@ -1281,22 +1281,23 @@ async def test_list_lb_traffic_extensions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lb_traffic_extensions - ] = mock_object + ] = mock_rpc request = {} await client.list_lb_traffic_extensions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lb_traffic_extensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1883,22 +1884,23 @@ async def test_get_lb_traffic_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lb_traffic_extension - ] = mock_object + ] = mock_rpc request = {} await client.get_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2220,8 +2222,9 @@ def test_create_lb_traffic_extension_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_lb_traffic_extension(request) @@ -2277,26 +2280,28 @@ async def test_create_lb_traffic_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lb_traffic_extension - ] = mock_object + ] = mock_rpc request = {} await client.create_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. 
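The dep_service and network_services client hunks above also drop the functools.partial indirection around get_transport_class. In the generated clients that method, together with the transport registry it reads, lives on the ...ClientMeta metaclass, so looking the attribute up through the concrete sync client class already returns a callable bound to that class. A sketch with generic names (not the generated classes) of why the two spellings resolve identically:

import functools


class _ClientMeta(type):
    # Registry on the metaclass resolves the same whether the bound "cls"
    # is the metaclass itself (old spelling) or the client class (new one).
    _transport_registry = {"grpc": "GrpcTransport", "rest": "RestTransport"}

    def get_transport_class(cls, label):
        return cls._transport_registry[label]


class DemoClient(metaclass=_ClientMeta):
    pass


# Old spelling: explicitly re-bind the metaclass method.
old_style = functools.partial(
    type(DemoClient).get_transport_class, type(DemoClient)
)
# New spelling: attribute lookup on the class is already bound.
new_style = DemoClient.get_transport_class

assert old_style("grpc") == new_style("grpc") == "GrpcTransport"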
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2623,8 +2628,9 @@ def test_update_lb_traffic_extension_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_lb_traffic_extension(request) @@ -2680,26 +2686,28 @@ async def test_update_lb_traffic_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_lb_traffic_extension - ] = mock_object + ] = mock_rpc request = {} await client.update_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3020,8 +3028,9 @@ def test_delete_lb_traffic_extension_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_lb_traffic_extension(request) @@ -3077,26 +3086,28 @@ async def test_delete_lb_traffic_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lb_traffic_extension - ] = mock_object + ] = mock_rpc request = {} await client.delete_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3474,22 +3485,23 @@ async def test_list_lb_route_extensions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lb_route_extensions - ] = mock_object + ] = mock_rpc request = {} await client.list_lb_route_extensions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lb_route_extensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4076,22 +4088,23 @@ async def test_get_lb_route_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lb_route_extension - ] = mock_object + ] = mock_rpc request = {} await client.get_lb_route_extension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lb_route_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4413,8 +4426,9 @@ def test_create_lb_route_extension_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_lb_route_extension(request) @@ -4470,26 +4484,28 @@ async def test_create_lb_route_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lb_route_extension - ] = mock_object + ] = mock_rpc request = {} await client.create_lb_route_extension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_lb_route_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4816,8 +4832,9 @@ def test_update_lb_route_extension_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_lb_route_extension(request) @@ -4873,26 +4890,28 @@ async def test_update_lb_route_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_lb_route_extension - ] = mock_object + ] = mock_rpc request = {} await client.update_lb_route_extension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_lb_route_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5213,8 +5232,9 @@ def test_delete_lb_route_extension_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_lb_route_extension(request) @@ -5270,26 +5290,28 @@ async def test_delete_lb_route_extension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lb_route_extension - ] = mock_object + ] = mock_rpc request = {} await client.delete_lb_route_extension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_lb_route_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py index 2d56a632d53d..44af9ff63a42 100644 --- a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py +++ b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_network_services.py @@ -1358,22 +1358,23 @@ async def test_list_endpoint_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_endpoint_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_endpoint_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_endpoint_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1965,22 +1966,23 @@ async def test_get_endpoint_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_endpoint_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_endpoint_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_endpoint_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2310,8 +2312,9 @@ def test_create_endpoint_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_endpoint_policy(request) @@ -2367,26 +2370,28 @@ async def test_create_endpoint_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_endpoint_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_endpoint_policy(request) # Establish that the underlying gRPC stub method was called. 
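The reworded comments in these operation-method tests describe a lazy cache: the first LRO call invokes wrapper_fn to build client._transport.operations_client, and every later call reuses that instance, which is why wrapper_fn.call_count stays at 0 after reset_mock(). A stripped-down sketch of that behaviour, with hypothetical names standing in for the generated transport:

from unittest import mock


class FakeTransport:
    """Hypothetical transport that lazily builds its operations client."""

    def __init__(self, operations_client_factory):
        self._factory = operations_client_factory
        self._operations_client = None

    @property
    def operations_client(self):
        # Built on the first LRO call, then cached for every later one.
        if self._operations_client is None:
            self._operations_client = self._factory()
        return self._operations_client


wrapper_fn = mock.Mock(name="wrapper_fn")
transport = FakeTransport(wrapper_fn)

transport.operations_client  # first call builds the client
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
transport.operations_client  # cached instance, no new wrapper
assert wrapper_fn.call_count == 0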
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_endpoint_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2714,8 +2719,9 @@ def test_update_endpoint_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_endpoint_policy(request) @@ -2771,26 +2777,28 @@ async def test_update_endpoint_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_endpoint_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_endpoint_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_endpoint_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3112,8 +3120,9 @@ def test_delete_endpoint_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_endpoint_policy(request) @@ -3169,26 +3178,28 @@ async def test_delete_endpoint_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_endpoint_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_endpoint_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_endpoint_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3547,22 +3558,23 @@ async def test_list_gateways_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_gateways - ] = mock_object + ] = mock_rpc request = {} await client.list_gateways(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_gateways(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4123,22 +4135,23 @@ async def test_get_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_gateway - ] = mock_object + ] = mock_rpc request = {} await client.get_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4441,8 +4454,9 @@ def test_create_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_gateway(request) @@ -4496,26 +4510,28 @@ async def test_create_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_gateway - ] = mock_object + ] = mock_rpc request = {} await client.create_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4821,8 +4837,9 @@ def test_update_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_gateway(request) @@ -4876,26 +4893,28 @@ async def test_update_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_gateway - ] = mock_object + ] = mock_rpc request = {} await client.update_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5195,8 +5214,9 @@ def test_delete_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_gateway(request) @@ -5250,26 +5270,28 @@ async def test_delete_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_gateway - ] = mock_object + ] = mock_rpc request = {} await client.delete_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5619,22 +5641,23 @@ async def test_list_grpc_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_grpc_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_grpc_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_grpc_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6192,22 +6215,23 @@ async def test_get_grpc_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_grpc_route - ] = mock_object + ] = mock_rpc request = {} await client.get_grpc_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_grpc_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6520,8 +6544,9 @@ def test_create_grpc_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_grpc_route(request) @@ -6577,26 +6602,28 @@ async def test_create_grpc_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_grpc_route - ] = mock_object + ] = mock_rpc request = {} await client.create_grpc_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_grpc_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6920,8 +6947,9 @@ def test_update_grpc_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_grpc_route(request) @@ -6977,26 +7005,28 @@ async def test_update_grpc_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_grpc_route - ] = mock_object + ] = mock_rpc request = {} await client.update_grpc_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_grpc_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7314,8 +7344,9 @@ def test_delete_grpc_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_grpc_route(request) @@ -7371,26 +7402,28 @@ async def test_delete_grpc_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_grpc_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_grpc_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_grpc_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7750,22 +7783,23 @@ async def test_list_http_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_http_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_http_routes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_http_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8323,22 +8357,23 @@ async def test_get_http_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_http_route - ] = mock_object + ] = mock_rpc request = {} await client.get_http_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_http_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8651,8 +8686,9 @@ def test_create_http_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_http_route(request) @@ -8708,26 +8744,28 @@ async def test_create_http_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_http_route - ] = mock_object + ] = mock_rpc request = {} await client.create_http_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_http_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9051,8 +9089,9 @@ def test_update_http_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_http_route(request) @@ -9108,26 +9147,28 @@ async def test_update_http_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_http_route - ] = mock_object + ] = mock_rpc request = {} await client.update_http_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_http_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9445,8 +9486,9 @@ def test_delete_http_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_http_route(request) @@ -9502,26 +9544,28 @@ async def test_delete_http_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_http_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_http_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_http_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9879,22 +9923,23 @@ async def test_list_tcp_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tcp_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_tcp_routes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tcp_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10449,22 +10494,23 @@ async def test_get_tcp_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tcp_route - ] = mock_object + ] = mock_rpc request = {} await client.get_tcp_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tcp_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10765,8 +10811,9 @@ def test_create_tcp_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tcp_route(request) @@ -10820,26 +10867,28 @@ async def test_create_tcp_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tcp_route - ] = mock_object + ] = mock_rpc request = {} await client.create_tcp_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tcp_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11147,8 +11196,9 @@ def test_update_tcp_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_tcp_route(request) @@ -11202,26 +11252,28 @@ async def test_update_tcp_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tcp_route - ] = mock_object + ] = mock_rpc request = {} await client.update_tcp_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_tcp_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11523,8 +11575,9 @@ def test_delete_tcp_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_tcp_route(request) @@ -11578,26 +11631,28 @@ async def test_delete_tcp_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tcp_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_tcp_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_tcp_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11945,22 +12000,23 @@ async def test_list_tls_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tls_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_tls_routes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tls_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12515,22 +12571,23 @@ async def test_get_tls_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tls_route - ] = mock_object + ] = mock_rpc request = {} await client.get_tls_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tls_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12831,8 +12888,9 @@ def test_create_tls_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tls_route(request) @@ -12886,26 +12944,28 @@ async def test_create_tls_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tls_route - ] = mock_object + ] = mock_rpc request = {} await client.create_tls_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tls_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13213,8 +13273,9 @@ def test_update_tls_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_tls_route(request) @@ -13268,26 +13329,28 @@ async def test_update_tls_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tls_route - ] = mock_object + ] = mock_rpc request = {} await client.update_tls_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_tls_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13589,8 +13652,9 @@ def test_delete_tls_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_tls_route(request) @@ -13644,26 +13708,28 @@ async def test_delete_tls_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tls_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_tls_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_tls_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14024,22 +14090,23 @@ async def test_list_service_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_bindings - ] = mock_object + ] = mock_rpc request = {} await client.list_service_bindings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14619,22 +14686,23 @@ async def test_get_service_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_binding - ] = mock_object + ] = mock_rpc request = {} await client.get_service_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14955,8 +15023,9 @@ def test_create_service_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service_binding(request) @@ -15012,26 +15081,28 @@ async def test_create_service_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_binding - ] = mock_object + ] = mock_rpc request = {} await client.create_service_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15363,8 +15434,9 @@ def test_delete_service_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service_binding(request) @@ -15420,26 +15492,28 @@ async def test_delete_service_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service_binding - ] = mock_object + ] = mock_rpc request = {} await client.delete_service_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15798,22 +15872,23 @@ async def test_list_meshes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_meshes - ] = mock_object + ] = mock_rpc request = {} await client.list_meshes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_meshes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16363,22 +16438,23 @@ async def test_get_mesh_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_mesh - ] = mock_object + ] = mock_rpc request = {} await client.get_mesh(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_mesh(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16675,8 +16751,9 @@ def test_create_mesh_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_mesh(request) @@ -16730,26 +16807,28 @@ async def test_create_mesh_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_mesh - ] = mock_object + ] = mock_rpc request = {} await client.create_mesh(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_mesh(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17055,8 +17134,9 @@ def test_update_mesh_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_mesh(request) @@ -17110,26 +17190,28 @@ async def test_update_mesh_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_mesh - ] = mock_object + ] = mock_rpc request = {} await client.update_mesh(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_mesh(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17429,8 +17511,9 @@ def test_delete_mesh_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_mesh(request) @@ -17484,26 +17567,28 @@ async def test_delete_mesh_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_mesh - ] = mock_object + ] = mock_rpc request = {} await client.delete_mesh(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_mesh(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks/gapic_version.py b/packages/google-cloud-notebooks/google/cloud/notebooks/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks/gapic_version.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/gapic_version.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/gapic_version.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/async_client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/async_client.py index a54f6646e109..f71482ac7d9d 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/async_client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ManagedNotebookServiceClient).get_transport_class, - type(ManagedNotebookServiceClient), - ) + get_transport_class = ManagedNotebookServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/client.py index c131a98d5abc..1e80fe0c93ee 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/managed_notebook_service/client.py @@ -679,7 +679,7 @@ def __init__( Type[ManagedNotebookServiceTransport], Callable[..., ManagedNotebookServiceTransport], ] = ( - type(self).get_transport_class(transport) + ManagedNotebookServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ManagedNotebookServiceTransport], transport) ) diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/async_client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/async_client.py index 20dc69a9bac8..b2e6e1d8ab48 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/async_client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,9 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NotebookServiceClient).get_transport_class, type(NotebookServiceClient) - ) + get_transport_class = NotebookServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/client.py index b4968ce0e669..2f5beca9641a 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1/services/notebook_service/client.py @@ -760,7 +760,7 @@ def __init__( transport_init: Union[ Type[NotebookServiceTransport], Callable[..., NotebookServiceTransport] ] = ( - type(self).get_transport_class(transport) + NotebookServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NotebookServiceTransport], transport) ) diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/gapic_version.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/gapic_version.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py index 99e923628f94..c3f7e3f1241f 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NotebookServiceClient).get_transport_class, type(NotebookServiceClient) - ) + get_transport_class = NotebookServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/client.py index a88d2cdce7ff..cc14e23ec87f 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v1beta1/services/notebook_service/client.py @@ -685,7 +685,7 @@ def __init__( transport_init: Union[ Type[NotebookServiceTransport], Callable[..., NotebookServiceTransport] ] = ( - type(self).get_transport_class(transport) + NotebookServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NotebookServiceTransport], transport) ) diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v2/gapic_version.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v2/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v2/gapic_version.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/async_client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/async_client.py index 01b2298414b3..4ef25247a7fd 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/async_client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NotebookServiceClient).get_transport_class, type(NotebookServiceClient) - ) + get_transport_class = NotebookServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/client.py b/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/client.py index ed3b5ecbabc2..7baeadff1637 100644 --- a/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/client.py +++ b/packages/google-cloud-notebooks/google/cloud/notebooks_v2/services/notebook_service/client.py @@ -676,7 +676,7 @@ def __init__( transport_init: Union[ Type[NotebookServiceTransport], Callable[..., NotebookServiceTransport] ] = ( - type(self).get_transport_class(transport) + NotebookServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NotebookServiceTransport], transport) ) diff --git a/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1.json b/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1.json index 916d5db416ba..1e5dcb8dbe89 100644 --- a/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1.json +++ b/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-notebooks", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1beta1.json b/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1beta1.json index 680e594d57e6..7bc31789024f 100644 --- a/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1beta1.json +++ b/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-notebooks", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v2.json b/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v2.json index 7036be8506c1..6fad38d8fc89 100644 --- a/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v2.json +++ b/packages/google-cloud-notebooks/samples/generated_samples/snippet_metadata_google.cloud.notebooks.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-notebooks", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py index 62d132651637..ff7eb1f48ccd 100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py @@ 
-1329,22 +1329,23 @@ async def test_list_runtimes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_runtimes - ] = mock_object + ] = mock_rpc request = {} await client.list_runtimes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_runtimes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1895,22 +1896,23 @@ async def test_get_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_runtime - ] = mock_object + ] = mock_rpc request = {} await client.get_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2207,8 +2209,9 @@ def test_create_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_runtime(request) @@ -2262,26 +2265,28 @@ async def test_create_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_runtime - ] = mock_object + ] = mock_rpc request = {} await client.create_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,8 +2596,9 @@ def test_update_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_runtime(request) @@ -2646,26 +2652,28 @@ async def test_update_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_runtime - ] = mock_object + ] = mock_rpc request = {} await client.update_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2967,8 +2975,9 @@ def test_delete_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_runtime(request) @@ -3022,26 +3031,28 @@ async def test_delete_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_runtime - ] = mock_object + ] = mock_rpc request = {} await client.delete_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3333,8 +3344,9 @@ def test_start_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_runtime(request) @@ -3388,26 +3400,28 @@ async def test_start_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_runtime - ] = mock_object + ] = mock_rpc request = {} await client.start_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3699,8 +3713,9 @@ def test_stop_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_runtime(request) @@ -3754,26 +3769,28 @@ async def test_stop_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_runtime - ] = mock_object + ] = mock_rpc request = {} await client.stop_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4067,8 +4084,9 @@ def test_switch_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.switch_runtime(request) @@ -4122,26 +4140,28 @@ async def test_switch_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.switch_runtime - ] = mock_object + ] = mock_rpc request = {} await client.switch_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.switch_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4433,8 +4453,9 @@ def test_reset_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_runtime(request) @@ -4488,26 +4509,28 @@ async def test_reset_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_runtime - ] = mock_object + ] = mock_rpc request = {} await client.reset_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4799,8 +4822,9 @@ def test_upgrade_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_runtime(request) @@ -4854,26 +4878,28 @@ async def test_upgrade_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_runtime - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5175,8 +5201,9 @@ def test_report_runtime_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.report_runtime_event(request) @@ -5232,26 +5259,28 @@ async def test_report_runtime_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_runtime_event - ] = mock_object + ] = mock_rpc request = {} await client.report_runtime_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.report_runtime_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5623,22 +5652,23 @@ async def test_refresh_runtime_token_internal_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.refresh_runtime_token_internal - ] = mock_object + ] = mock_rpc request = {} await client.refresh_runtime_token_internal(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.refresh_runtime_token_internal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5954,8 +5984,9 @@ def test_diagnose_runtime_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.diagnose_runtime(request) @@ -6009,26 +6040,28 @@ async def test_diagnose_runtime_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_runtime - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_runtime(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.diagnose_runtime(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py index 115a5e62954a..4be71e5dcbd8 100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1/test_notebook_service.py @@ -1303,22 +1303,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1935,22 +1936,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2289,8 +2291,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2344,26 +2347,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2683,8 +2688,9 @@ def test_register_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.register_instance(request) @@ -2740,26 +2746,28 @@ async def test_register_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.register_instance - ] = mock_object + ] = mock_rpc request = {} await client.register_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.register_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2984,8 +2992,9 @@ def test_set_instance_accelerator_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_instance_accelerator(request) @@ -3041,26 +3050,28 @@ async def test_set_instance_accelerator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_instance_accelerator - ] = mock_object + ] = mock_rpc request = {} await client.set_instance_accelerator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_instance_accelerator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3287,8 +3298,9 @@ def test_set_instance_machine_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_instance_machine_type(request) @@ -3344,26 +3356,28 @@ async def test_set_instance_machine_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_instance_machine_type - ] = mock_object + ] = mock_rpc request = {} await client.set_instance_machine_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_instance_machine_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3588,8 +3602,9 @@ def test_update_instance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance_config(request) @@ -3645,26 +3660,28 @@ async def test_update_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance_config - ] = mock_object + ] = mock_rpc request = {} await client.update_instance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3889,8 +3906,9 @@ def test_update_shielded_instance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_shielded_instance_config(request) @@ -3946,26 +3964,28 @@ async def test_update_shielded_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_shielded_instance_config - ] = mock_object + ] = mock_rpc request = {} await client.update_shielded_instance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_shielded_instance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4190,8 +4210,9 @@ def test_set_instance_labels_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_instance_labels(request) @@ -4247,26 +4268,28 @@ async def test_set_instance_labels_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_instance_labels - ] = mock_object + ] = mock_rpc request = {} await client.set_instance_labels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_instance_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4544,22 +4567,23 @@ async def test_update_instance_metadata_items_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance_metadata_items - ] = mock_object + ] = mock_rpc request = {} await client.update_instance_metadata_items(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_instance_metadata_items(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4774,8 +4798,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -4829,26 +4854,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5138,8 +5165,9 @@ def test_start_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_instance(request) @@ -5193,26 +5221,28 @@ async def test_start_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_instance - ] = mock_object + ] = mock_rpc request = {} await client.start_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5420,8 +5450,9 @@ def test_stop_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_instance(request) @@ -5475,26 +5506,28 @@ async def test_stop_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_instance - ] = mock_object + ] = mock_rpc request = {} await client.stop_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5702,8 +5735,9 @@ def test_reset_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_instance(request) @@ -5757,26 +5791,28 @@ async def test_reset_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_instance - ] = mock_object + ] = mock_rpc request = {} await client.reset_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5996,8 +6032,9 @@ def test_report_instance_info_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.report_instance_info(request) @@ -6053,26 +6090,28 @@ async def test_report_instance_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_instance_info - ] = mock_object + ] = mock_rpc request = {} await client.report_instance_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.report_instance_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6364,22 +6403,23 @@ async def test_is_instance_upgradeable_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.is_instance_upgradeable - ] = mock_object + ] = mock_rpc request = {} await client.is_instance_upgradeable(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.is_instance_upgradeable(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6672,22 +6712,23 @@ async def test_get_instance_health_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_health - ] = mock_object + ] = mock_rpc request = {} await client.get_instance_health(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance_health(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6994,8 +7035,9 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance(request) @@ -7049,26 +7091,28 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_instance - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7286,8 +7330,9 @@ def test_rollback_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rollback_instance(request) @@ -7343,26 +7388,28 @@ async def test_rollback_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_instance - ] = mock_object + ] = mock_rpc request = {} await client.rollback_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rollback_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7584,8 +7631,9 @@ def test_diagnose_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.diagnose_instance(request) @@ -7641,26 +7689,28 @@ async def test_diagnose_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_instance - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.diagnose_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7991,8 +8041,9 @@ def test_upgrade_instance_internal_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance_internal(request) @@ -8048,26 +8099,28 @@ async def test_upgrade_instance_internal_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_instance_internal - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_instance_internal(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance_internal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8352,22 +8405,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8939,22 +8993,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9265,8 +9320,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -9322,26 +9378,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9671,8 +9729,9 @@ def test_delete_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_environment(request) @@ -9728,26 +9787,28 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10112,22 +10173,23 @@ async def test_list_schedules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_schedules - ] = mock_object + ] = mock_rpc request = {} await client.list_schedules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_schedules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10687,22 +10749,23 @@ async def test_get_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_schedule - ] = mock_object + ] = mock_rpc request = {} await client.get_schedule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11001,8 +11064,9 @@ def test_delete_schedule_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_schedule(request) @@ -11056,26 +11120,28 @@ async def test_delete_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_schedule - ] = mock_object + ] = mock_rpc request = {} await client.delete_schedule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11367,8 +11433,9 @@ def test_create_schedule_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_schedule(request) @@ -11422,26 +11489,28 @@ async def test_create_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_schedule - ] = mock_object + ] = mock_rpc request = {} await client.create_schedule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11753,8 +11822,9 @@ def test_trigger_schedule_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.trigger_schedule(request) @@ -11808,26 +11878,28 @@ async def test_trigger_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.trigger_schedule - ] = mock_object + ] = mock_rpc request = {} await client.trigger_schedule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.trigger_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12100,22 +12172,23 @@ async def test_list_executions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_executions - ] = mock_object + ] = mock_rpc request = {} await client.list_executions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_executions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12675,22 +12748,23 @@ async def test_get_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_execution - ] = mock_object + ] = mock_rpc request = {} await client.get_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12991,8 +13065,9 @@ def test_delete_execution_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_execution(request) @@ -13046,26 +13121,28 @@ async def test_delete_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_execution - ] = mock_object + ] = mock_rpc request = {} await client.delete_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13359,8 +13436,9 @@ def test_create_execution_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_execution(request) @@ -13414,26 +13492,28 @@ async def test_create_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_execution - ] = mock_object + ] = mock_rpc request = {} await client.create_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py index 24f710a81ef9..7b563b8bc0fd 100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py @@ -1326,22 +1326,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1867,22 +1868,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2135,8 +2137,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2190,26 +2193,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2427,8 +2432,9 @@ def test_register_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.register_instance(request) @@ -2484,26 +2490,28 @@ async def test_register_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.register_instance - ] = mock_object + ] = mock_rpc request = {} await client.register_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.register_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2728,8 +2736,9 @@ def test_set_instance_accelerator_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_instance_accelerator(request) @@ -2785,26 +2794,28 @@ async def test_set_instance_accelerator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_instance_accelerator - ] = mock_object + ] = mock_rpc request = {} await client.set_instance_accelerator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_instance_accelerator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3031,8 +3042,9 @@ def test_set_instance_machine_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_instance_machine_type(request) @@ -3088,26 +3100,28 @@ async def test_set_instance_machine_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_instance_machine_type - ] = mock_object + ] = mock_rpc request = {} await client.set_instance_machine_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_instance_machine_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3331,8 +3345,9 @@ def test_set_instance_labels_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_instance_labels(request) @@ -3388,26 +3403,28 @@ async def test_set_instance_labels_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_instance_labels - ] = mock_object + ] = mock_rpc request = {} await client.set_instance_labels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_instance_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3621,8 +3638,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3676,26 +3694,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3903,8 +3923,9 @@ def test_start_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_instance(request) @@ -3958,26 +3979,28 @@ async def test_start_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_instance - ] = mock_object + ] = mock_rpc request = {} await client.start_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4185,8 +4208,9 @@ def test_stop_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_instance(request) @@ -4240,26 +4264,28 @@ async def test_stop_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_instance - ] = mock_object + ] = mock_rpc request = {} await client.stop_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4467,8 +4493,9 @@ def test_reset_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_instance(request) @@ -4522,26 +4549,28 @@ async def test_reset_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_instance - ] = mock_object + ] = mock_rpc request = {} await client.reset_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4761,8 +4790,9 @@ def test_report_instance_info_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.report_instance_info(request) @@ -4818,26 +4848,28 @@ async def test_report_instance_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_instance_info - ] = mock_object + ] = mock_rpc request = {} await client.report_instance_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.report_instance_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5129,22 +5161,23 @@ async def test_is_instance_upgradeable_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.is_instance_upgradeable - ] = mock_object + ] = mock_rpc request = {} await client.is_instance_upgradeable(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.is_instance_upgradeable(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5369,8 +5402,9 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance(request) @@ -5424,26 +5458,28 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_instance - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5664,8 +5700,9 @@ def test_upgrade_instance_internal_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance_internal(request) @@ -5721,26 +5758,28 @@ async def test_upgrade_instance_internal_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_instance_internal - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_instance_internal(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance_internal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6025,22 +6064,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6526,22 +6566,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6770,8 +6811,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -6827,26 +6869,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7070,8 +7114,9 @@ def test_delete_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_environment(request) @@ -7127,26 +7172,28 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py index f49194050e98..e103c2cb7800 100644 --- a/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py +++ b/packages/google-cloud-notebooks/tests/unit/gapic/notebooks_v2/test_notebook_service.py @@ -1335,22 +1335,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1916,22 +1917,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2238,8 +2240,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2293,26 +2296,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2622,8 +2627,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2677,26 +2683,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2998,8 +3006,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3053,26 +3062,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3362,8 +3373,9 @@ def test_start_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_instance(request) @@ -3417,26 +3429,28 @@ async def test_start_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_instance - ] = mock_object + ] = mock_rpc request = {} await client.start_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3644,8 +3658,9 @@ def test_stop_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_instance(request) @@ -3699,26 +3714,28 @@ async def test_stop_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_instance - ] = mock_object + ] = mock_rpc request = {} await client.stop_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3926,8 +3943,9 @@ def test_reset_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_instance(request) @@ -3981,26 +3999,28 @@ async def test_reset_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_instance - ] = mock_object + ] = mock_rpc request = {} await client.reset_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4286,22 +4306,23 @@ async def test_check_instance_upgradability_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_instance_upgradability - ] = mock_object + ] = mock_rpc request = {} await client.check_instance_upgradability(request) # Establish that the underlying gRPC stub method was called. 
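# A minimal, self-contained sketch of the test pattern these hunks converge on,
# assuming only the standard library; the dict below stands in for the client
# transport's _wrapped_methods cache and is a placeholder, not the generated
# surface. The key point of the rename to `mock_rpc` plus the added
# `mock_rpc.return_value = mock.Mock()` is that awaiting the AsyncMock then
# yields a concrete, non-awaitable result, much like a real wrapped RPC would.
import asyncio
from unittest import mock


async def demo_cached_wrapped_rpc() -> None:
    wrapped_methods = {}  # stands in for client._transport._wrapped_methods
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaiting the stub yields a plain Mock
    wrapped_methods["list_instances"] = mock_rpc

    # First call goes through the cached wrapper (our stub) exactly once.
    result = await wrapped_methods["list_instances"]({})
    assert isinstance(result, mock.Mock)
    assert mock_rpc.call_count == 1

    # A second call reuses the same cached entry; the stub is simply called again.
    await wrapped_methods["list_instances"]({})
    assert mock_rpc.call_count == 2


asyncio.run(demo_cached_wrapped_rpc())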
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_instance_upgradability(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4527,8 +4548,9 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance(request) @@ -4582,26 +4604,28 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_instance - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4821,8 +4845,9 @@ def test_rollback_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rollback_instance(request) @@ -4878,26 +4903,28 @@ async def test_rollback_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_instance - ] = mock_object + ] = mock_rpc request = {} await client.rollback_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rollback_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5119,8 +5146,9 @@ def test_diagnose_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.diagnose_instance(request) @@ -5176,26 +5204,28 @@ async def test_diagnose_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_instance - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.diagnose_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-optimization/google/cloud/optimization/gapic_version.py b/packages/google-cloud-optimization/google/cloud/optimization/gapic_version.py index ab4d576b9121..558c8aab67c5 100644 --- a/packages/google-cloud-optimization/google/cloud/optimization/gapic_version.py +++ b/packages/google-cloud-optimization/google/cloud/optimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-optimization/google/cloud/optimization_v1/gapic_version.py b/packages/google-cloud-optimization/google/cloud/optimization_v1/gapic_version.py index ab4d576b9121..558c8aab67c5 100644 --- a/packages/google-cloud-optimization/google/cloud/optimization_v1/gapic_version.py +++ b/packages/google-cloud-optimization/google/cloud/optimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.8.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/async_client.py b/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/async_client.py index cf73c35143bc..cbdb15ef79b5 100644 --- a/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/async_client.py +++ b/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FleetRoutingClient).get_transport_class, type(FleetRoutingClient) - ) + get_transport_class = FleetRoutingClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/client.py b/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/client.py index fde0869ee3d2..0c650733655e 100644 --- a/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/client.py +++ b/packages/google-cloud-optimization/google/cloud/optimization_v1/services/fleet_routing/client.py @@ -659,7 +659,7 @@ def __init__( transport_init: Union[ Type[FleetRoutingTransport], Callable[..., FleetRoutingTransport] ] = ( - type(self).get_transport_class(transport) + FleetRoutingClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FleetRoutingTransport], transport) ) diff --git a/packages/google-cloud-optimization/samples/generated_samples/snippet_metadata_google.cloud.optimization.v1.json b/packages/google-cloud-optimization/samples/generated_samples/snippet_metadata_google.cloud.optimization.v1.json index 640d58f92fab..d38082dbd11f 100644 --- a/packages/google-cloud-optimization/samples/generated_samples/snippet_metadata_google.cloud.optimization.v1.json +++ b/packages/google-cloud-optimization/samples/generated_samples/snippet_metadata_google.cloud.optimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-optimization", - "version": "1.8.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-optimization/tests/unit/gapic/optimization_v1/test_fleet_routing.py b/packages/google-cloud-optimization/tests/unit/gapic/optimization_v1/test_fleet_routing.py index 2dbc641a9270..c53148327a71 100644 --- a/packages/google-cloud-optimization/tests/unit/gapic/optimization_v1/test_fleet_routing.py +++ b/packages/google-cloud-optimization/tests/unit/gapic/optimization_v1/test_fleet_routing.py @@ -1275,22 +1275,23 @@ async def test_optimize_tours_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.optimize_tours - ] = mock_object + ] = mock_rpc request = {} await client.optimize_tours(request) # Establish that the underlying gRPC stub method was called. 
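# A self-contained illustration of the get_transport_class simplification in
# these hunks: the generated clients define get_transport_class on a metaclass,
# so functools.partial(type(Client).get_transport_class, type(Client)) and a
# plain Client.get_transport_class reference behave equivalently. FakeClient,
# _ClientMeta and FakeTransport are placeholders, not the generated classes.
import functools


class FakeTransport:
    pass


class _ClientMeta(type):
    def get_transport_class(cls, label=None):
        # In the generated code the transport registry lives on the metaclass,
        # so the lookup succeeds whichever of the two spellings below is used;
        # here a single placeholder class is returned unconditionally.
        return FakeTransport


class FakeClient(metaclass=_ClientMeta):
    pass


old_style = functools.partial(
    type(FakeClient).get_transport_class, type(FakeClient)
)
new_style = FakeClient.get_transport_class

assert old_style("grpc") is FakeTransport
assert new_style("grpc") is FakeTransport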
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.optimize_tours(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1513,8 +1514,9 @@ def test_batch_optimize_tours_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_optimize_tours(request) @@ -1570,26 +1572,28 @@ async def test_batch_optimize_tours_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_optimize_tours - ] = mock_object + ] = mock_rpc request = {} await client.batch_optimize_tours(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_optimize_tours(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py index 0b9427f4e8a5..558c8aab67c5 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py index 0b9427f4e8a5..558c8aab67c5 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index 4a53eea0bd4e..3406e2214500 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient) - ) + get_transport_class = EnvironmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index 76a49163a1ed..a00e4bf1e986 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -714,7 +714,7 @@ def __init__( transport_init: Union[ Type[EnvironmentsTransport], Callable[..., EnvironmentsTransport] ] = ( - type(self).get_transport_class(transport) + EnvironmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EnvironmentsTransport], transport) ) diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py index 8847459cb86a..3f668fb98f60 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ImageVersionsClient).get_transport_class, type(ImageVersionsClient) - ) + get_transport_class = ImageVersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py index d8ab99df5068..2293e28401de 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py @@ -640,7 +640,7 @@ def __init__( transport_init: Union[ Type[ImageVersionsTransport], Callable[..., ImageVersionsTransport] ] = ( - type(self).get_transport_class(transport) + ImageVersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageVersionsTransport], transport) ) diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py index 0b9427f4e8a5..558c8aab67c5 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py index 9b08ad9b29c2..7b62020b69b8 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient) - ) + get_transport_class = EnvironmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py index 7a6078d63470..89ac9cf80f59 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py @@ -719,7 +719,7 @@ def __init__( transport_init: Union[ Type[EnvironmentsTransport], Callable[..., EnvironmentsTransport] ] = ( - type(self).get_transport_class(transport) + EnvironmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EnvironmentsTransport], transport) ) diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py index 22e1ad6b8d69..c2a9e05cd1a6 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ImageVersionsClient).get_transport_class, type(ImageVersionsClient) - ) + get_transport_class = ImageVersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py index 002a12334bb3..047471027045 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[ImageVersionsTransport], Callable[..., ImageVersionsTransport] ] = ( - type(self).get_transport_class(transport) + ImageVersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageVersionsTransport], transport) ) diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index cfea112c527c..3fffc7af33c9 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow", - "version": "1.13.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json index 7d34b0488dad..4c413d500bdb 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow-service", - "version": "1.13.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py index 3030979d3c23..3751f10f0bbf 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py @@ -1225,8 +1225,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
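# A compact sketch of the behaviour the reworded comments in the following
# hunks describe: a wrapper factory builds the transport's operations client on
# the first RPC, and the cached instance is reused afterwards, so the factory
# is not called again. LazyTransport and wrapper_fn here are placeholders, not
# the real google.api_core machinery exercised by these tests.
from unittest import mock


class LazyTransport:
    def __init__(self, make_operations_client):
        self._make_operations_client = make_operations_client
        self._operations_client = None

    @property
    def operations_client(self):
        # Built once on first access, then cached for every later call.
        if self._operations_client is None:
            self._operations_client = self._make_operations_client()
        return self._operations_client


wrapper_fn = mock.Mock(side_effect=object)
transport = LazyTransport(wrapper_fn)

first = transport.operations_client   # first RPC-like access builds the client
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
second = transport.operations_client  # subsequent access hits the cache
assert wrapper_fn.call_count == 0
assert first is second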
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -1282,26 +1283,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1676,22 +1679,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2058,22 +2062,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2588,8 +2593,9 @@ def test_update_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_environment(request) @@ -2645,26 +2651,28 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2994,8 +3002,9 @@ def test_delete_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_environment(request) @@ -3051,26 +3060,28 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3452,22 +3463,23 @@ async def test_execute_airflow_command_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_airflow_command - ] = mock_object + ] = mock_rpc request = {} await client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. 
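# --- Illustrative sketch, not part of the generated diff above ---
# A minimal sketch of the mock_rpc pattern these async test hunks switch to, assuming
# only unittest.mock and asyncio semantics: the renamed AsyncMock pins its awaited
# result to a plain Mock, so whatever inspects the response after the await is not
# handed another awaitable, and call_count tracks both invocations exactly as the
# assertions above expect. The coroutine below is a hypothetical stand-in for a test.
import asyncio
from unittest import mock

async def _demo():
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaited value is a plain, synchronous Mock

    response = await mock_rpc({})        # first call
    await mock_rpc({})                   # second call reuses the same cached mock

    assert isinstance(response, mock.Mock)
    assert not isinstance(response, mock.AsyncMock)
    assert mock_rpc.call_count == 2      # mirrors the call_count assertions in the tests

asyncio.run(_demo())
# --- end sketch ---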
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_airflow_command(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3768,22 +3780,23 @@ async def test_stop_airflow_command_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_airflow_command - ] = mock_object + ] = mock_rpc request = {} await client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_airflow_command(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4076,22 +4089,23 @@ async def test_poll_airflow_command_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.poll_airflow_command - ] = mock_object + ] = mock_rpc request = {} await client.poll_airflow_command(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.poll_airflow_command(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4368,22 +4382,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4945,22 +4960,23 @@ async def test_create_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5343,22 +5359,23 @@ async def test_get_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5733,22 +5750,23 @@ async def test_list_user_workloads_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_user_workloads_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_user_workloads_secrets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_user_workloads_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6321,22 +6339,23 @@ async def test_update_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6702,22 +6721,23 @@ async def test_delete_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7081,22 +7101,23 @@ async def test_create_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.create_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7487,22 +7508,23 @@ async def test_get_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.get_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7877,22 +7899,23 @@ async def test_list_user_workloads_config_maps_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_user_workloads_config_maps - ] = mock_object + ] = mock_rpc request = {} await client.list_user_workloads_config_maps(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_user_workloads_config_maps(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8467,22 +8490,23 @@ async def test_update_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.update_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8856,22 +8880,23 @@ async def test_delete_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.delete_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9168,8 +9193,9 @@ def test_save_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.save_snapshot(request) @@ -9223,26 +9249,28 @@ async def test_save_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.save_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.save_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.save_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9452,8 +9480,9 @@ def test_load_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.load_snapshot(request) @@ -9507,26 +9536,28 @@ async def test_load_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.load_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.load_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.load_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9742,8 +9773,9 @@ def test_database_failover_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.database_failover(request) @@ -9799,26 +9831,28 @@ async def test_database_failover_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.database_failover - ] = mock_object + ] = mock_rpc request = {} await client.database_failover(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.database_failover(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10107,22 +10141,23 @@ async def test_fetch_database_properties_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_database_properties - ] = mock_object + ] = mock_rpc request = {} await client.fetch_database_properties(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_database_properties(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py index 78886b8be934..a3d8eec48fb5 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_image_versions.py @@ -1292,22 +1292,23 @@ async def test_list_image_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_image_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_image_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_image_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py index 6673f35610a6..5cef23532261 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py @@ -1228,8 +1228,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -1285,26 +1286,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1679,22 +1682,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2061,22 +2065,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,8 +2596,9 @@ def test_update_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_environment(request) @@ -2648,26 +2654,28 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2997,8 +3005,9 @@ def test_delete_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_environment(request) @@ -3054,26 +3063,28 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3383,8 +3394,9 @@ def test_restart_web_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_web_server(request) @@ -3440,26 +3452,28 @@ async def test_restart_web_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_web_server - ] = mock_object + ] = mock_rpc request = {} await client.restart_web_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_web_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3675,8 +3689,9 @@ def test_check_upgrade_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.check_upgrade(request) @@ -3730,26 +3745,28 @@ async def test_check_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.check_upgrade(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.check_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4039,22 +4056,23 @@ async def test_execute_airflow_command_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_airflow_command - ] = mock_object + ] = mock_rpc request = {} await client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_airflow_command(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4355,22 +4373,23 @@ async def test_stop_airflow_command_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_airflow_command - ] = mock_object + ] = mock_rpc request = {} await client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. 
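# --- Illustrative sketch, not part of the generated diff above ---
# A minimal sketch of the behavior the rewritten comments describe: the transport's
# operations_client is built lazily on the first long-running-operation call and
# cached, which is why the tests can assert wrapper_fn.call_count == 0 on the second
# RPC. This is an assumption-level illustration of that caching shape, not the
# generated transport code itself.
class _DemoTransport:
    def __init__(self):
        self._operations_client = None
        self.builds = 0  # counts how many times the expensive client was constructed

    @property
    def operations_client(self):
        # Built on the first long-running-operation call, cached for all later calls.
        if self._operations_client is None:
            self.builds += 1
            self._operations_client = object()  # stand-in for an OperationsClient
        return self._operations_client

_transport = _DemoTransport()
first = _transport.operations_client
second = _transport.operations_client
assert first is second and _transport.builds == 1
# --- end sketch ---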
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_airflow_command(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4663,22 +4682,23 @@ async def test_poll_airflow_command_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.poll_airflow_command - ] = mock_object + ] = mock_rpc request = {} await client.poll_airflow_command(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.poll_airflow_command(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4955,22 +4975,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5532,22 +5553,23 @@ async def test_create_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5930,22 +5952,23 @@ async def test_get_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6320,22 +6343,23 @@ async def test_list_user_workloads_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_user_workloads_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_user_workloads_secrets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_user_workloads_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6908,22 +6932,23 @@ async def test_update_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7289,22 +7314,23 @@ async def test_delete_user_workloads_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user_workloads_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_user_workloads_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user_workloads_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7668,22 +7694,23 @@ async def test_create_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.create_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8074,22 +8101,23 @@ async def test_get_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.get_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8464,22 +8492,23 @@ async def test_list_user_workloads_config_maps_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_user_workloads_config_maps - ] = mock_object + ] = mock_rpc request = {} await client.list_user_workloads_config_maps(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_user_workloads_config_maps(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9054,22 +9083,23 @@ async def test_update_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.update_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9443,22 +9473,23 @@ async def test_delete_user_workloads_config_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user_workloads_config_map - ] = mock_object + ] = mock_rpc request = {} await client.delete_user_workloads_config_map(request) # Establish that the underlying gRPC stub method was called. 
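# --- Illustrative sketch, not part of the generated diff above ---
# A minimal sketch of the test strategy repeated in these hunks, under the assumption
# (consistent with the code shown) that the transport keeps its wrapped RPCs in a
# _wrapped_methods dict built once: the test swaps one entry for a mock, calls the
# client method twice, and checks that both calls hit the same cached entry. The demo
# client and method names are hypothetical.
from unittest import mock

class _DemoTransport:
    def __init__(self):
        # Built once when the transport is created; the tests overwrite entries here.
        self._wrapped_methods = {"get_thing": lambda request: {"echo": request}}

class _DemoClient:
    def __init__(self):
        self._transport = _DemoTransport()

    def get_thing(self, request):
        # Every call goes through the cached wrapped method, never a fresh wrapper.
        return self._transport._wrapped_methods["get_thing"](request)

client = _DemoClient()
mock_rpc = mock.Mock(return_value=None)
client._transport._wrapped_methods["get_thing"] = mock_rpc

client.get_thing({})
client.get_thing({})
assert mock_rpc.call_count == 2  # the same cached entry served both calls
# --- end sketch ---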
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user_workloads_config_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9755,8 +9786,9 @@ def test_save_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.save_snapshot(request) @@ -9810,26 +9842,28 @@ async def test_save_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.save_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.save_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.save_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10039,8 +10073,9 @@ def test_load_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.load_snapshot(request) @@ -10094,26 +10129,28 @@ async def test_load_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.load_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.load_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.load_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10329,8 +10366,9 @@ def test_database_failover_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.database_failover(request) @@ -10386,26 +10424,28 @@ async def test_database_failover_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.database_failover - ] = mock_object + ] = mock_rpc request = {} await client.database_failover(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.database_failover(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10694,22 +10734,23 @@ async def test_fetch_database_properties_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_database_properties - ] = mock_object + ] = mock_rpc request = {} await client.fetch_database_properties(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_database_properties(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py index 3001c786e527..63387d986c9b 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_image_versions.py @@ -1292,22 +1292,23 @@ async def test_list_image_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_image_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_image_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_image_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-os-config/google/cloud/osconfig/gapic_version.py b/packages/google-cloud-os-config/google/cloud/osconfig/gapic_version.py index 1a7fb072f786..558c8aab67c5 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig/gapic_version.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_version.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_version.py index 1a7fb072f786..558c8aab67c5 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_version.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/async_client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/async_client.py index 2845fef1aef1..04a0ae0e8e95 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/async_client.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
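# --- Illustrative sketch, not part of the generated diff above ---
# The gapic_version.py hunks above reset __version__ to the "0.0.0" placeholder; the
# trailing {x-release-please-version} marker is what lets release tooling stamp the
# real version back in at release time. The regex pass below is only an assumption
# about how such a marker could be consumed, not the actual release-please
# implementation, and "1.18.0" is an arbitrary example version.
import re

line = '__version__ = "0.0.0"  # {x-release-please-version}'
stamped = re.sub(r'"\d+\.\d+\.\d+"', '"1.18.0"', line)  # hypothetical stamping step
assert stamped == '__version__ = "1.18.0"  # {x-release-please-version}'
# --- end sketch ---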
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(OsConfigServiceClient).get_transport_class, type(OsConfigServiceClient) - ) + get_transport_class = OsConfigServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/client.py index 0e58e7b579e0..df6e2c11f687 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/client.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/client.py @@ -708,7 +708,7 @@ def __init__( transport_init: Union[ Type[OsConfigServiceTransport], Callable[..., OsConfigServiceTransport] ] = ( - type(self).get_transport_class(transport) + OsConfigServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OsConfigServiceTransport], transport) ) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/async_client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/async_client.py index 264318e6fa25..28af081169b2 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/async_client.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -230,10 +229,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(OsConfigZonalServiceClient).get_transport_class, - type(OsConfigZonalServiceClient), - ) + get_transport_class = OsConfigZonalServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/client.py index 62ba934411d6..85275e9b250a 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/client.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/client.py @@ -796,7 +796,7 @@ def __init__( Type[OsConfigZonalServiceTransport], Callable[..., OsConfigZonalServiceTransport], ] = ( - type(self).get_transport_class(transport) + OsConfigZonalServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OsConfigZonalServiceTransport], transport) ) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_version.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_version.py index 1a7fb072f786..558c8aab67c5 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_version.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
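# --- Illustrative sketch, not part of the generated diff above ---
# A minimal sketch of the transport_init branch shown in the client.py hunks: a string
# name (or None) is resolved through get_transport_class, while anything else is
# treated as a ready-made transport factory. Only that visible shape is assumed; the
# registry, labels, and return values below are hypothetical.
from typing import Callable, Optional, Union

_REGISTRY = {
    "grpc": lambda **kwargs: ("grpc-transport", kwargs),
    "rest": lambda **kwargs: ("rest-transport", kwargs),
}

def get_transport_class(label: Optional[str] = None) -> Callable[..., tuple]:
    # Named lookup for a string label, defaulting to the first registered transport.
    return _REGISTRY[label] if label else next(iter(_REGISTRY.values()))

def resolve_transport(transport: Union[str, Callable[..., tuple], None]) -> Callable[..., tuple]:
    if isinstance(transport, str) or transport is None:
        return get_transport_class(transport)
    return transport  # caller supplied a transport class or factory directly

assert resolve_transport("rest")()[0] == "rest-transport"
assert resolve_transport(None)()[0] == "grpc-transport"
assert resolve_transport(lambda **kwargs: ("custom", kwargs))()[0] == "custom"
# --- end sketch ---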
# -__version__ = "1.17.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/async_client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/async_client.py index 3fee77e860dd..83c038226273 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/async_client.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -239,10 +238,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(OsConfigZonalServiceClient).get_transport_class, - type(OsConfigZonalServiceClient), - ) + get_transport_class = OsConfigZonalServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/client.py index 57632b52a3b0..06006432eb3c 100644 --- a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/client.py +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/client.py @@ -820,7 +820,7 @@ def __init__( Type[OsConfigZonalServiceTransport], Callable[..., OsConfigZonalServiceTransport], ] = ( - type(self).get_transport_class(transport) + OsConfigZonalServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OsConfigZonalServiceTransport], transport) ) diff --git a/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1.json b/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1.json index c80c378773a8..99230c1f96c0 100644 --- a/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1.json +++ b/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-os-config", - "version": "1.17.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1alpha.json b/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1alpha.json index 92594aea0d69..0ae2360ae3ee 100644 --- a/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1alpha.json +++ b/packages/google-cloud-os-config/samples/generated_samples/snippet_metadata_google.cloud.osconfig.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-os-config", - "version": "1.17.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py index 5931b99fae50..e95ffa209f42 100644 --- a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py +++ 
b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py @@ -1350,22 +1350,23 @@ async def test_execute_patch_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_patch_job - ] = mock_object + ] = mock_rpc request = {} await client.execute_patch_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_patch_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1671,22 +1672,23 @@ async def test_get_patch_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_patch_job - ] = mock_object + ] = mock_rpc request = {} await client.get_patch_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_patch_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2068,22 +2070,23 @@ async def test_cancel_patch_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_patch_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_patch_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_patch_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2366,22 +2369,23 @@ async def test_list_patch_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_patch_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_patch_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_patch_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2941,22 +2945,23 @@ async def test_list_patch_job_instance_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_patch_job_instance_details - ] = mock_object + ] = mock_rpc request = {} await client.list_patch_job_instance_details(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_patch_job_instance_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3541,22 +3546,23 @@ async def test_create_patch_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_patch_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_patch_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_patch_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3958,22 +3964,23 @@ async def test_get_patch_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_patch_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_patch_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_patch_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4352,22 +4359,23 @@ async def test_list_patch_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_patch_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_patch_deployments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_patch_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4935,22 +4943,23 @@ async def test_delete_patch_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_patch_deployment - ] = mock_object + ] = mock_rpc request = {} await client.delete_patch_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_patch_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5316,22 +5325,23 @@ async def test_update_patch_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_patch_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_patch_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_patch_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5724,22 +5734,23 @@ async def test_pause_patch_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_patch_deployment - ] = mock_object + ] = mock_rpc request = {} await client.pause_patch_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_patch_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6122,22 +6133,23 @@ async def test_resume_patch_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_patch_deployment - ] = mock_object + ] = mock_rpc request = {} await client.resume_patch_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_patch_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py index 2453c353a613..5e5eb99be580 100644 --- a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py @@ -1327,8 +1327,9 @@ def test_create_os_policy_assignment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_os_policy_assignment(request) @@ -1384,26 +1385,28 @@ async def test_create_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.create_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1739,8 +1742,9 @@ def test_update_os_policy_assignment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_os_policy_assignment(request) @@ -1796,26 +1800,28 @@ async def test_update_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.update_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2230,22 +2236,23 @@ async def test_get_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.get_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2639,22 +2646,23 @@ async def test_list_os_policy_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_policy_assignments - ] = mock_object + ] = mock_rpc request = {} await client.list_os_policy_assignments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_policy_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3239,22 +3247,23 @@ async def test_list_os_policy_assignment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_policy_assignment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_os_policy_assignment_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_policy_assignment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3783,8 +3792,9 @@ def test_delete_os_policy_assignment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_os_policy_assignment(request) @@ -3840,26 +3850,28 @@ async def test_delete_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.delete_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4244,22 +4256,23 @@ async def test_get_os_policy_assignment_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_os_policy_assignment_report - ] = mock_object + ] = mock_rpc request = {} await client.get_os_policy_assignment_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_os_policy_assignment_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4652,22 +4665,23 @@ async def test_list_os_policy_assignment_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_policy_assignment_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_os_policy_assignment_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_policy_assignment_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5243,22 +5257,23 @@ async def test_get_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_inventory - ] = mock_object + ] = mock_rpc request = {} await client.get_inventory(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5609,22 +5624,23 @@ async def test_list_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_inventories - ] = mock_object + ] = mock_rpc request = {} await client.list_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6180,22 +6196,23 @@ async def test_get_vulnerability_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vulnerability_report - ] = mock_object + ] = mock_rpc request = {} await client.get_vulnerability_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vulnerability_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6572,22 +6589,23 @@ async def test_list_vulnerability_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vulnerability_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_vulnerability_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vulnerability_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py index 99d974af8039..a34b7eee9541 100644 --- a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py @@ -1329,8 +1329,9 @@ def test_create_os_policy_assignment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_os_policy_assignment(request) @@ -1386,26 +1387,28 @@ async def test_create_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.create_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1741,8 +1744,9 @@ def test_update_os_policy_assignment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_os_policy_assignment(request) @@ -1798,26 +1802,28 @@ async def test_update_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.update_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2232,22 +2238,23 @@ async def test_get_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.get_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2641,22 +2648,23 @@ async def test_list_os_policy_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_policy_assignments - ] = mock_object + ] = mock_rpc request = {} await client.list_os_policy_assignments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_policy_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3241,22 +3249,23 @@ async def test_list_os_policy_assignment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_policy_assignment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_os_policy_assignment_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_policy_assignment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3785,8 +3794,9 @@ def test_delete_os_policy_assignment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_os_policy_assignment(request) @@ -3842,26 +3852,28 @@ async def test_delete_os_policy_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_os_policy_assignment - ] = mock_object + ] = mock_rpc request = {} await client.delete_os_policy_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_os_policy_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4260,22 +4272,23 @@ async def test_get_instance_os_policies_compliance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_os_policies_compliance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance_os_policies_compliance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance_os_policies_compliance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4684,22 +4697,23 @@ async def test_list_instance_os_policies_compliances_async_use_cached_wrapped_rp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_os_policies_compliances - ] = mock_object + ] = mock_rpc request = {} await client.list_instance_os_policies_compliances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instance_os_policies_compliances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5305,22 +5319,23 @@ async def test_get_os_policy_assignment_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_os_policy_assignment_report - ] = mock_object + ] = mock_rpc request = {} await client.get_os_policy_assignment_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_os_policy_assignment_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5713,22 +5728,23 @@ async def test_list_os_policy_assignment_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_policy_assignment_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_os_policy_assignment_reports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_policy_assignment_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6304,22 +6320,23 @@ async def test_get_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_inventory - ] = mock_object + ] = mock_rpc request = {} await client.get_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6670,22 +6687,23 @@ async def test_list_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_inventories - ] = mock_object + ] = mock_rpc request = {} await client.list_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7241,22 +7259,23 @@ async def test_get_vulnerability_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vulnerability_report - ] = mock_object + ] = mock_rpc request = {} await client.get_vulnerability_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vulnerability_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7633,22 +7652,23 @@ async def test_list_vulnerability_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vulnerability_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_vulnerability_reports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vulnerability_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py b/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py index f1df4c417db6..558c8aab67c5 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py index f1df4c417db6..558c8aab67c5 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py index 3830066fa837..d3f4d4cc7247 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
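Aside on the test hunks above: beyond renaming `mock_object` to `mock_rpc`, the regenerated tests give each `AsyncMock` an explicit plain `Mock` return value, so every awaited call on the patched wrapped RPC returns a deterministic `Mock` stand-in for the response. A minimal sketch of just that pattern, where `fake_wrapped_methods` is an illustrative stand-in for the client's `_wrapped_methods` cache and not real library API:

import asyncio
from unittest import mock


async def demo():
    # Illustrative stand-in for client._client._transport._wrapped_methods.
    fake_wrapped_methods = {}

    # Same shape as the regenerated tests: an AsyncMock whose awaited result
    # is the plain Mock assigned to return_value.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    fake_wrapped_methods["execute_patch_job"] = mock_rpc

    # First call: awaiting the AsyncMock yields the plain Mock response.
    response = await fake_wrapped_methods["execute_patch_job"]({})
    assert isinstance(response, mock.Mock)
    assert mock_rpc.call_count == 1

    # Second call reuses the same cached mock; only the call count grows.
    await fake_wrapped_methods["execute_patch_job"]({})
    assert mock_rpc.call_count == 2


asyncio.run(demo())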
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(OsLoginServiceClient).get_transport_class, type(OsLoginServiceClient) - ) + get_transport_class = OsLoginServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py index 8de4ba1b5e73..f8726f5df1de 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py @@ -679,7 +679,7 @@ def __init__( transport_init: Union[ Type[OsLoginServiceTransport], Callable[..., OsLoginServiceTransport] ] = ( - type(self).get_transport_class(transport) + OsLoginServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OsLoginServiceTransport], transport) ) diff --git a/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json b/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json index 8357de8d6398..92b367f767a3 100644 --- a/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json +++ b/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-os-login", - "version": "2.14.6" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py b/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py index 1aa66d35f207..6fe4e7cec0ff 100644 --- a/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py +++ b/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py @@ -1313,22 +1313,23 @@ async def test_create_ssh_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ssh_public_key - ] = mock_object + ] = mock_rpc request = {} await client.create_ssh_public_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_ssh_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1704,22 +1705,23 @@ async def test_delete_posix_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_posix_account - ] = mock_object + ] = mock_rpc request = {} await client.delete_posix_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_posix_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2075,22 +2077,23 @@ async def test_delete_ssh_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_ssh_public_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_ssh_public_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_ssh_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2454,22 +2457,23 @@ async def test_get_login_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_login_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_login_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_login_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2849,22 +2853,23 @@ async def test_get_ssh_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ssh_public_key - ] = mock_object + ] = mock_rpc request = {} await client.get_ssh_public_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ssh_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3240,22 +3245,23 @@ async def test_import_ssh_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_ssh_public_key - ] = mock_object + ] = mock_rpc request = {} await client.import_ssh_public_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_ssh_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3656,22 +3662,23 @@ async def test_update_ssh_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_ssh_public_key - ] = mock_object + ] = mock_rpc request = {} await client.update_ssh_public_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_ssh_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py index 355df6b536f8..558c8aab67c5 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py index 355df6b536f8..558c8aab67c5 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py index 1681d69cc4e1..cfb13d90803a 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -224,9 +223,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ParallelstoreClient).get_transport_class, type(ParallelstoreClient) - ) + get_transport_class = ParallelstoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py index e653427432c9..1cb9552118a6 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py @@ -748,7 +748,7 @@ def __init__( transport_init: Union[ Type[ParallelstoreTransport], Callable[..., ParallelstoreTransport] ] = ( - type(self).get_transport_class(transport) + ParallelstoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ParallelstoreTransport], transport) ) diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json index 5cd9eff36350..01334a8f8053 100644 --- a/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json +++ b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-parallelstore", - "version": "0.2.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py index ead571555c69..6d759022cccf 100644 --- a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py +++ b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1beta/test_parallelstore.py @@ -1300,22 +1300,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1896,22 +1897,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2234,8 +2236,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2289,26 +2292,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2618,8 +2623,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2673,26 +2679,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2994,8 +3002,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3049,26 +3058,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3360,8 +3371,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -3415,26 +3427,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3644,8 +3658,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -3699,26 +3714,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From ffcc359a473055b2320061d76b542b0ab560aad5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:58:50 -0400 Subject: [PATCH 017/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#12999) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWVkZ2VuZXR3b3JrLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWVudGVycHJpc2Vrbm93bGVkZ2VncmFwaC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWVzc2VudGlhbC1jb250YWN0cy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWV2ZW50YXJjLXB1Ymxpc2hpbmcvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWV2ZW50YXJjLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWZpbGVzdG9yZS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWZ1bmN0aW9ucy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdkY2hhcmR3YXJlbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1iYWNrdXAvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1jb25uZWN0LWdhdGV3YXkvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1odWIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1tdWx0aWNsb3VkLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdzdWl0ZWFkZG9ucy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWlhbS1sb2dnaW5nLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWlhbS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWlhcC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWlkcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWttcy1pbnZlbnRvcnkvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWttcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWxhbmd1YWdlLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/edgenetwork/gapic_version.py | 2 +- .../cloud/edgenetwork_v1/gapic_version.py | 2 +- .../services/edge_network/async_client.py | 5 +- .../services/edge_network/client.py | 2 +- ..._metadata_google.cloud.edgenetwork.v1.json | 2 +- .../gapic/edgenetwork_v1/test_edge_network.py | 334 
+++++++------ .../enterpriseknowledgegraph/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../types/service.py | 1 - ...gle.cloud.enterpriseknowledgegraph.v1.json | 2 +- ...test_enterprise_knowledge_graph_service.py | 81 +-- .../cloud/essential_contacts/gapic_version.py | 2 +- .../essential_contacts_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../essential_contacts_service/client.py | 2 +- ...ata_google.cloud.essentialcontacts.v1.json | 2 +- .../test_essential_contacts_service.py | 63 +-- .../eventarc_publishing/gapic_version.py | 2 +- .../eventarc_publishing_v1/gapic_version.py | 2 +- .../services/publisher/async_client.py | 5 +- .../services/publisher/client.py | 2 +- ...a_google.cloud.eventarc.publishing.v1.json | 2 +- .../eventarc_publishing_v1/test_publisher.py | 18 +- .../google/cloud/eventarc/gapic_version.py | 2 +- .../google/cloud/eventarc_v1/gapic_version.py | 2 +- .../services/eventarc/async_client.py | 5 +- .../eventarc_v1/services/eventarc/client.py | 2 +- ...pet_metadata_google.cloud.eventarc.v1.json | 2 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 242 +++++---- .../google/cloud/filestore/gapic_version.py | 2 +- .../cloud/filestore_v1/gapic_version.py | 2 +- .../cloud_filestore_manager/async_client.py | 6 +- .../cloud_filestore_manager/client.py | 2 +- ...et_metadata_google.cloud.filestore.v1.json | 2 +- .../test_cloud_filestore_manager.py | 263 +++++----- .../gdchardwaremanagement/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../gdc_hardware_management/async_client.py | 6 +- .../gdc_hardware_management/client.py | 2 +- .../types/resources.py | 8 +- ...e.cloud.gdchardwaremanagement.v1alpha.json | 2 +- .../test_gdc_hardware_management.py | 467 ++++++++++-------- .../google/cloud/gke_backup/gapic_version.py | 2 +- .../cloud/gke_backup_v1/gapic_version.py | 2 +- .../services/backup_for_gke/async_client.py | 5 +- .../services/backup_for_gke/client.py | 2 +- ...et_metadata_google.cloud.gkebackup.v1.json | 2 +- .../gke_backup_v1/test_backup_for_gke.py | 345 +++++++------ .../cloud/gkeconnect/gateway/gapic_version.py | 2 +- .../gateway_v1beta1/gapic_version.py | 2 +- .../services/gateway_service/async_client.py | 5 +- .../services/gateway_service/client.py | 2 +- ...ogle.cloud.gkeconnect.gateway.v1beta1.json | 2 +- .../gateway_v1beta1/test_gateway_service.py | 45 +- .../google/cloud/gkehub/gapic_version.py | 2 +- .../configmanagement_v1/gapic_version.py | 2 +- .../google/cloud/gkehub_v1/gapic_version.py | 2 +- .../multiclusteringress_v1/gapic_version.py | 2 +- .../services/gke_hub/async_client.py | 5 +- .../gkehub_v1/services/gke_hub/client.py | 2 +- .../cloud/gkehub_v1beta1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../gke_hub_membership_service/client.py | 2 +- ...ippet_metadata_google.cloud.gkehub.v1.json | 2 +- ..._metadata_google.cloud.gkehub.v1beta1.json | 2 +- .../unit/gapic/gkehub_v1/test_gke_hub.py | 159 +++--- .../test_gke_hub_membership_service.py | 102 ++-- .../cloud/gke_multicloud/gapic_version.py | 2 +- .../cloud/gke_multicloud_v1/gapic_version.py | 2 +- .../attached_clusters/async_client.py | 5 +- .../services/attached_clusters/client.py | 2 +- .../services/aws_clusters/async_client.py | 5 +- .../services/aws_clusters/client.py | 2 +- .../services/azure_clusters/async_client.py | 5 +- .../services/azure_clusters/client.py | 2 +- ...etadata_google.cloud.gkemulticloud.v1.json | 2 +- .../test_attached_clusters.py | 121 +++-- .../gke_multicloud_v1/test_aws_clusters.py | 214 ++++---- 
.../gke_multicloud_v1/test_azure_clusters.py | 251 ++++++---- .../cloud/gsuiteaddons/gapic_version.py | 2 +- .../cloud/gsuiteaddons_v1/gapic_version.py | 2 +- .../services/g_suite_add_ons/async_client.py | 5 +- .../services/g_suite_add_ons/client.py | 2 +- ...metadata_google.cloud.gsuiteaddons.v1.json | 2 +- .../gsuiteaddons_v1/test_g_suite_add_ons.py | 81 +-- .../google/cloud/iam_logging/gapic_version.py | 2 +- .../cloud/iam_logging_v1/gapic_version.py | 2 +- .../google/cloud/iam/gapic_version.py | 2 +- .../google/cloud/iam_admin/gapic_version.py | 2 +- .../cloud/iam_admin_v1/gapic_version.py | 2 +- .../iam_admin_v1/services/iam/async_client.py | 5 +- .../cloud/iam_admin_v1/services/iam/client.py | 2 +- .../cloud/iam_credentials/gapic_version.py | 2 +- .../cloud/iam_credentials_v1/gapic_version.py | 2 +- .../services/iam_credentials/async_client.py | 5 +- .../services/iam_credentials/client.py | 2 +- .../google/cloud/iam_v2/gapic_version.py | 2 +- .../iam_v2/services/policies/async_client.py | 5 +- .../cloud/iam_v2/services/policies/client.py | 2 +- .../google/cloud/iam_v2beta/gapic_version.py | 2 +- .../services/policies/async_client.py | 5 +- .../iam_v2beta/services/policies/client.py | 2 +- .../snippet_metadata_google.iam.admin.v1.json | 2 +- ...et_metadata_google.iam.credentials.v1.json | 2 +- .../snippet_metadata_google.iam.v2.json | 2 +- .../snippet_metadata_google.iam.v2beta.json | 2 +- .../tests/unit/gapic/iam_admin_v1/test_iam.py | 279 ++++++----- .../test_iam_credentials.py | 36 +- .../tests/unit/gapic/iam_v2/test_policies.py | 75 +-- .../unit/gapic/iam_v2beta/test_policies.py | 75 +-- .../google/cloud/iap/gapic_version.py | 2 +- .../google/cloud/iap_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../snippet_metadata_google.cloud.iap.v1.json | 2 +- ...test_identity_aware_proxy_admin_service.py | 90 ++-- ...est_identity_aware_proxy_o_auth_service.py | 72 +-- .../google/cloud/ids/gapic_version.py | 2 +- .../google/cloud/ids_v1/gapic_version.py | 2 +- .../cloud/ids_v1/services/ids/async_client.py | 5 +- .../cloud/ids_v1/services/ids/client.py | 2 +- .../snippet_metadata_google.cloud.ids.v1.json | 2 +- .../tests/unit/gapic/ids_v1/test_ids.py | 56 ++- .../cloud/kms_inventory/gapic_version.py | 2 +- .../cloud/kms_inventory_v1/gapic_version.py | 2 +- .../key_dashboard_service/async_client.py | 6 +- .../services/key_dashboard_service/client.py | 2 +- .../key_tracking_service/async_client.py | 6 +- .../services/key_tracking_service/client.py | 2 +- ...etadata_google.cloud.kms.inventory.v1.json | 2 +- .../test_key_dashboard_service.py | 9 +- .../test_key_tracking_service.py | 18 +- .../google/cloud/kms/gapic_version.py | 2 +- .../google/cloud/kms_v1/gapic_version.py | 2 +- .../kms_v1/services/autokey/async_client.py | 6 +- .../cloud/kms_v1/services/autokey/client.py | 2 +- .../services/autokey_admin/async_client.py | 6 +- .../kms_v1/services/autokey_admin/client.py | 2 +- .../services/ekm_service/async_client.py | 5 +- .../kms_v1/services/ekm_service/client.py | 2 +- .../key_management_service/async_client.py | 6 +- .../services/key_management_service/client.py | 2 +- .../snippet_metadata_google.cloud.kms.v1.json | 2 +- .../tests/unit/gapic/kms_v1/test_autokey.py | 37 +- .../unit/gapic/kms_v1/test_autokey_admin.py | 27 +- .../unit/gapic/kms_v1/test_ekm_service.py | 63 +-- .../kms_v1/test_key_management_service.py | 252 +++++----- .../google/cloud/language/gapic_version.py | 2 +- 
.../google/cloud/language_v1/gapic_version.py | 2 +- .../services/language_service/async_client.py | 5 +- .../services/language_service/client.py | 2 +- .../cloud/language_v1beta2/gapic_version.py | 2 +- .../services/language_service/async_client.py | 5 +- .../services/language_service/client.py | 2 +- .../google/cloud/language_v2/gapic_version.py | 2 +- .../services/language_service/async_client.py | 5 +- .../services/language_service/client.py | 2 +- ...pet_metadata_google.cloud.language.v1.json | 2 +- ...etadata_google.cloud.language.v1beta2.json | 2 +- ...pet_metadata_google.cloud.language.v2.json | 2 +- .../language_v1/test_language_service.py | 63 +-- .../language_v1beta2/test_language_service.py | 63 +-- .../language_v2/test_language_service.py | 45 +- 167 files changed, 2428 insertions(+), 2000 deletions(-) diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/async_client.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/async_client.py index 927e28fd9d25..5d3bf1dcd8e0 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/async_client.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,9 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EdgeNetworkClient).get_transport_class, type(EdgeNetworkClient) - ) + get_transport_class = EdgeNetworkClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py index 1fd5cef15056..a14b5edbb688 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py @@ -788,7 +788,7 @@ def __init__( transport_init: Union[ Type[EdgeNetworkTransport], Callable[..., EdgeNetworkTransport] ] = ( - type(self).get_transport_class(transport) + EdgeNetworkClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EdgeNetworkTransport], transport) ) diff --git a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json index 381700d4c926..04d64b8b1330 100644 --- a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json +++ b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgenetwork", - "version": "0.1.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py b/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py index 88fabc1fa350..caf9eb0e92c6 100644 --- a/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py +++ b/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py @@ -1253,22 +1253,23 @@ async def test_initialize_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.initialize_zone - ] = mock_object + ] = mock_rpc request = {} await client.initialize_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.initialize_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1621,22 +1622,23 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_zones - ] = mock_object + ] = mock_rpc request = {} await client.list_zones(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2182,22 +2184,23 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_zone - ] = mock_object + ] = mock_rpc request = {} await client.get_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2553,22 +2556,23 @@ async def test_list_networks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_networks - ] = mock_object + ] = mock_rpc request = {} await client.list_networks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_networks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3119,22 +3123,23 @@ async def test_get_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_network - ] = mock_object + ] = mock_rpc request = {} await client.get_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3480,22 +3485,23 @@ async def test_diagnose_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_network - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.diagnose_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3789,8 +3795,9 @@ def test_create_network_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_network(request) @@ -3844,26 +3851,28 @@ async def test_create_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_network - ] = mock_object + ] = mock_rpc request = {} await client.create_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4175,8 +4184,9 @@ def test_delete_network_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_network(request) @@ -4230,26 +4240,28 @@ async def test_delete_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_network - ] = mock_object + ] = mock_rpc request = {} await client.delete_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4604,22 +4616,23 @@ async def test_list_subnets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subnets - ] = mock_object + ] = mock_rpc request = {} await client.list_subnets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subnets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5183,22 +5196,23 @@ async def test_get_subnet_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subnet - ] = mock_object + ] = mock_rpc request = {} await client.get_subnet(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subnet(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5505,8 +5519,9 @@ def test_create_subnet_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_subnet(request) @@ -5560,26 +5575,28 @@ async def test_create_subnet_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_subnet - ] = mock_object + ] = mock_rpc request = {} await client.create_subnet(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_subnet(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5889,8 +5906,9 @@ def test_update_subnet_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_subnet(request) @@ -5944,26 +5962,28 @@ async def test_update_subnet_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_subnet - ] = mock_object + ] = mock_rpc request = {} await client.update_subnet(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_subnet(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6265,8 +6285,9 @@ def test_delete_subnet_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_subnet(request) @@ -6320,26 +6341,28 @@ async def test_delete_subnet_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_subnet - ] = mock_object + ] = mock_rpc request = {} await client.delete_subnet(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_subnet(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6706,22 +6729,23 @@ async def test_list_interconnects_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_interconnects - ] = mock_object + ] = mock_rpc request = {} await client.list_interconnects(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_interconnects(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7303,22 +7327,23 @@ async def test_get_interconnect_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_interconnect - ] = mock_object + ] = mock_rpc request = {} await client.get_interconnect(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_interconnect(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7687,22 +7712,23 @@ async def test_diagnose_interconnect_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_interconnect - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_interconnect(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.diagnose_interconnect(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8080,22 +8106,23 @@ async def test_list_interconnect_attachments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_interconnect_attachments - ] = mock_object + ] = mock_rpc request = {} await client.list_interconnect_attachments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_interconnect_attachments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8692,22 +8719,23 @@ async def test_get_interconnect_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_interconnect_attachment - ] = mock_object + ] = mock_rpc request = {} await client.get_interconnect_attachment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_interconnect_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9038,8 +9066,9 @@ def test_create_interconnect_attachment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_interconnect_attachment(request) @@ -9095,26 +9124,28 @@ async def test_create_interconnect_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_interconnect_attachment - ] = mock_object + ] = mock_rpc request = {} await client.create_interconnect_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_interconnect_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9448,8 +9479,9 @@ def test_delete_interconnect_attachment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_interconnect_attachment(request) @@ -9505,26 +9537,28 @@ async def test_delete_interconnect_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_interconnect_attachment - ] = mock_object + ] = mock_rpc request = {} await client.delete_interconnect_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_interconnect_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9890,22 +9924,23 @@ async def test_list_routers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_routers - ] = mock_object + ] = mock_rpc request = {} await client.list_routers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_routers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10460,22 +10495,23 @@ async def test_get_router_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_router - ] = mock_object + ] = mock_rpc request = {} await client.get_router(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_router(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10823,22 +10859,23 @@ async def test_diagnose_router_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_router - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_router(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.diagnose_router(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11132,8 +11169,9 @@ def test_create_router_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_router(request) @@ -11187,26 +11225,28 @@ async def test_create_router_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_router - ] = mock_object + ] = mock_rpc request = {} await client.create_router(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_router(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11516,8 +11556,9 @@ def test_update_router_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_router(request) @@ -11571,26 +11612,28 @@ async def test_update_router_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_router - ] = mock_object + ] = mock_rpc request = {} await client.update_router(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_router(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11892,8 +11935,9 @@ def test_delete_router_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_router(request) @@ -11947,26 +11991,28 @@ async def test_delete_router_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_router - ] = mock_object + ] = mock_rpc request = {} await client.delete_router(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_router(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py index 075108786e34..558c8aab67c5 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py index 075108786e34..558c8aab67c5 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/async_client.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/async_client.py index d9ad93ad3500..a2f82b0b04e1 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/async_client.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -229,10 +228,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EnterpriseKnowledgeGraphServiceClient).get_transport_class, - type(EnterpriseKnowledgeGraphServiceClient), - ) + get_transport_class = EnterpriseKnowledgeGraphServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py index fe399771ebe9..1f7e73f78c3d 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py @@ -770,7 +770,7 @@ def __init__( Type[EnterpriseKnowledgeGraphServiceTransport], Callable[..., EnterpriseKnowledgeGraphServiceTransport], ] = ( - type(self).get_transport_class(transport) + EnterpriseKnowledgeGraphServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., EnterpriseKnowledgeGraphServiceTransport], transport diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py index e3eb7a1ad0bf..5fadf09380d2 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py @@ -191,7 +191,6 @@ class Options(proto.Message): - schema.org/addressLocality - schema.org/addressRegion - schema.org/addressCountry - Warning: processing will no longer be regionalized! 
""" diff --git a/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json b/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json index 3b14c3c85670..ada6b2e84d57 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json +++ b/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-enterpriseknowledgegraph", - "version": "0.3.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py b/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py index 24bbe0c35f86..9249d292efb5 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py +++ b/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py @@ -1407,22 +1407,23 @@ async def test_create_entity_reconciliation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entity_reconciliation_job - ] = mock_object + ] = mock_rpc request = {} await client.create_entity_reconciliation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entity_reconciliation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1818,22 +1819,23 @@ async def test_get_entity_reconciliation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entity_reconciliation_job - ] = mock_object + ] = mock_rpc request = {} await client.get_entity_reconciliation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entity_reconciliation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2212,22 +2214,23 @@ async def test_list_entity_reconciliation_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entity_reconciliation_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_entity_reconciliation_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entity_reconciliation_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2797,22 +2800,23 @@ async def test_cancel_entity_reconciliation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_entity_reconciliation_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_entity_reconciliation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_entity_reconciliation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3169,22 +3173,23 @@ async def test_delete_entity_reconciliation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entity_reconciliation_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_entity_reconciliation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entity_reconciliation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3528,22 +3533,23 @@ async def test_lookup_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup - ] = mock_object + ] = mock_rpc request = {} await client.lookup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3894,22 +3900,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4262,22 +4269,23 @@ async def test_lookup_public_kg_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_public_kg - ] = mock_object + ] = mock_rpc request = {} await client.lookup_public_kg(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_public_kg(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4632,22 +4640,23 @@ async def test_search_public_kg_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_public_kg - ] = mock_object + ] = mock_rpc request = {} await client.search_public_kg(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_public_kg(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/async_client.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/async_client.py index a8a2635c50e5..48326e8c1fdb 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/async_client.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,10 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EssentialContactsServiceClient).get_transport_class, - type(EssentialContactsServiceClient), - ) + get_transport_class = EssentialContactsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py index cee9bbac904f..9f9fb27628f7 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py @@ -669,7 +669,7 @@ def __init__( Type[EssentialContactsServiceTransport], Callable[..., EssentialContactsServiceTransport], ] = ( - type(self).get_transport_class(transport) + EssentialContactsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EssentialContactsServiceTransport], transport) ) diff --git a/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json b/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json index f912cbf19d79..5c3df15cfed5 100644 --- a/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json +++ b/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-essential-contacts", - "version": "1.7.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py b/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py index 2ac129d5b9df..175e912a2da1 100644 --- a/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py +++ b/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py @@ -1375,22 +1375,23 @@ async def test_create_contact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ client._client._transport.create_contact - ] = mock_object + ] = mock_rpc request = {} await client.create_contact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_contact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1765,22 +1766,23 @@ async def test_update_contact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_contact - ] = mock_object + ] = mock_rpc request = {} await client.update_contact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_contact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2147,22 +2149,23 @@ async def test_list_contacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_contacts - ] = mock_object + ] = mock_rpc request = {} await client.list_contacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_contacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2719,22 +2722,23 @@ async def test_get_contact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_contact - ] = mock_object + ] = mock_rpc request = {} await client.get_contact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_contact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3082,22 +3086,23 @@ async def test_delete_contact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_contact - ] = mock_object + ] = mock_rpc request = {} await client.delete_contact(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_contact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3441,22 +3446,23 @@ async def test_compute_contacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_contacts - ] = mock_object + ] = mock_rpc request = {} await client.compute_contacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_contacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3920,22 +3926,23 @@ async def test_send_test_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.send_test_message - ] = mock_object + ] = mock_rpc request = {} await client.send_test_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.send_test_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py index 02b228845902..558c8aab67c5 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py index 02b228845902..558c8aab67c5 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/async_client.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/async_client.py index 2021bfb8ad3a..2039a452d57f 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/async_client.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,9 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PublisherClient).get_transport_class, type(PublisherClient) - ) + get_transport_class = PublisherClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py index 6c8f6bc511cf..ab5ecc27a959 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[PublisherTransport], Callable[..., PublisherTransport] ] = ( - type(self).get_transport_class(transport) + PublisherClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PublisherTransport], transport) ) diff --git a/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json b/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json index 784426185f8b..315310cec9dd 100644 --- a/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json +++ b/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-eventarc-publishing", - "version": "0.6.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py b/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py index 588619e9d090..856abb6bb783 100644 --- a/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py +++ b/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py @@ -1244,22 +1244,23 @@ async def test_publish_channel_connection_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.publish_channel_connection_events - ] = mock_object + ] = mock_rpc request = {} await client.publish_channel_connection_events(request) # Establish that the 
underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.publish_channel_connection_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1525,22 +1526,23 @@ async def test_publish_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.publish_events - ] = mock_object + ] = mock_rpc request = {} await client.publish_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.publish_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py b/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 251dfba33e20..2e019a1c449f 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
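# The async_client.py hunk just below drops the functools import and replaces
# functools.partial(type(EventarcClient).get_transport_class, type(EventarcClient)) with a
# plain EventarcClient.get_transport_class; the matching client.py hunk swaps
# type(self).get_transport_class(transport) for the explicit class name. Both spellings
# appear to resolve to the same registry lookup, so this reads as a simplification rather
# than a behaviour change. A toy illustration of the Python mechanics involved, with
# hypothetical names rather than the library's real class layout:

import functools


class _FakeClientMeta(type):
    """Stand-in for a generated client metaclass that owns the transport registry."""

    _transport_registry = {"grpc": "FakeGrpcTransport"}

    def get_transport_class(cls, label=None):
        return cls._transport_registry[label or "grpc"]


class FakeClient(metaclass=_FakeClientMeta):
    pass


# Old style: manually re-bind the (meta)class that attribute lookup would bind anyway.
old_style = functools.partial(type(FakeClient).get_transport_class, type(FakeClient))
# New style: the attribute reference is already a bound, reusable callable.
new_style = FakeClient.get_transport_class

assert old_style("grpc") == new_style("grpc") == "FakeGrpcTransport"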
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -223,9 +222,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EventarcClient).get_transport_class, type(EventarcClient) - ) + get_transport_class = EventarcClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 2eec3e6861b8..742f41e3caa5 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -862,7 +862,7 @@ def __init__( transport_init: Union[ Type[EventarcTransport], Callable[..., EventarcTransport] ] = ( - type(self).get_transport_class(transport) + EventarcClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EventarcTransport], transport) ) diff --git a/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index 3f1fc0089484..596aaecb1b9c 100644 --- a/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json +++ b/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-eventarc", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 664bd638e91a..11192bebc18f 100644 --- a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -1264,22 +1264,23 @@ async def test_get_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_trigger - ] = mock_object + ] = mock_rpc request = {} await client.get_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1641,22 +1642,23 @@ async def test_list_triggers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_triggers - ] = mock_object + ] = mock_rpc request = {} await client.list_triggers(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_triggers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2147,8 +2149,9 @@ def test_create_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_trigger(request) @@ -2202,26 +2205,28 @@ async def test_create_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_trigger - ] = mock_object + ] = mock_rpc request = {} await client.create_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2527,8 +2532,9 @@ def test_update_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_trigger(request) @@ -2582,26 +2588,28 @@ async def test_update_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_trigger - ] = mock_object + ] = mock_rpc request = {} await client.update_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2913,8 +2921,9 @@ def test_delete_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_trigger(request) @@ -2968,26 +2977,28 @@ async def test_delete_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_trigger - ] = mock_object + ] = mock_rpc request = {} await client.delete_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3359,22 +3370,23 @@ async def test_get_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel - ] = mock_object + ] = mock_rpc request = {} await client.get_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3736,22 +3748,23 @@ async def test_list_channels_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channels - ] = mock_object + ] = mock_rpc request = {} await client.list_channels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4242,8 +4255,9 @@ def test_create_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
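# The reworded comments above spell out a detail the old wording glossed over: for
# long-running-operation methods, the first call also goes through wrapper_fn to build the
# cached client._transport.operations_client, which is why these tests reset wrapper_fn
# before the second call and then assert wrapper_fn.call_count == 0 alongside
# mock_rpc.call_count == 2 (no re-wrapping, and no second operations client).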
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_channel(request) @@ -4297,26 +4311,28 @@ async def test_create_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_ - ] = mock_object + ] = mock_rpc request = {} await client.create_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4622,8 +4638,9 @@ def test_update_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_channel(request) @@ -4677,26 +4694,28 @@ async def test_update_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_channel - ] = mock_object + ] = mock_rpc request = {} await client.update_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4996,8 +5015,9 @@ def test_delete_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_channel(request) @@ -5051,26 +5071,28 @@ async def test_delete_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_channel - ] = mock_object + ] = mock_rpc request = {} await client.delete_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5419,22 +5441,23 @@ async def test_get_provider_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_provider - ] = mock_object + ] = mock_rpc request = {} await client.get_provider(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_provider(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5790,22 +5813,23 @@ async def test_list_providers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_providers - ] = mock_object + ] = mock_rpc request = {} await client.list_providers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_providers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6372,22 +6396,23 @@ async def test_get_channel_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_channel_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6770,22 +6795,23 @@ async def test_list_channel_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channel_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_channel_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channel_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7309,8 +7335,9 @@ def test_create_channel_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_channel_connection(request) @@ -7366,26 +7393,28 @@ async def test_create_channel_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_channel_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_channel_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7725,8 +7754,9 @@ def test_delete_channel_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_channel_connection(request) @@ -7782,26 +7812,28 @@ async def test_delete_channel_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_channel_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_channel_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_channel_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8174,22 +8206,23 @@ async def test_get_google_channel_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_google_channel_config - ] = mock_object + ] = mock_rpc request = {} await client.get_google_channel_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_google_channel_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8562,22 +8595,23 @@ async def test_update_google_channel_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_google_channel_config - ] = mock_object + ] = mock_rpc request = {} await client.update_google_channel_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_channel_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py b/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py +++ b/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py b/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py +++ b/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/async_client.py b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/async_client.py index d204a1fd18f7..427e84707f01 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/async_client.py +++ b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -226,10 +225,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudFilestoreManagerClient).get_transport_class, - type(CloudFilestoreManagerClient), - ) + get_transport_class = CloudFilestoreManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py index 4a014a288a09..946d707ace3b 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py +++ b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py @@ -746,7 +746,7 @@ def __init__( Type[CloudFilestoreManagerTransport], Callable[..., CloudFilestoreManagerTransport], ] = ( - type(self).get_transport_class(transport) + CloudFilestoreManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudFilestoreManagerTransport], transport) ) diff --git a/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json b/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json index 6e6670fd88ff..a9b41ceec6d9 100644 --- a/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json +++ b/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-filestore", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py b/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py index 4b05ea0d76d2..fde8cbd7c69f 100644 --- a/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py +++ 
b/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py @@ -1376,22 +1376,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1967,22 +1968,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2298,8 +2300,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2353,26 +2356,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,8 +2684,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
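# The use_cached_wrapped_rpc tests in this file (and the other generated test modules in
# this patch) all poke the same seam: the transport keeps its retry/timeout-wrapped RPCs
# in a _wrapped_methods mapping, and the client is expected to reuse that entry rather
# than re-wrap on every call. A rough sketch of the behaviour being exercised, with
# made-up names rather than the generated transport API:

from unittest import mock


class _SketchTransport:
    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._wrapped_methods = {}  # cache keyed by the raw stub method

    def wrapped(self, stub_method):
        if stub_method not in self._wrapped_methods:
            # Only the first lookup pays the wrapping cost (retries, timeouts, metadata).
            self._wrapped_methods[stub_method] = self._wrapper_fn(stub_method)
        return self._wrapped_methods[stub_method]


wrapper_fn = mock.Mock(side_effect=lambda method: method)
stub = mock.Mock()
transport = _SketchTransport(wrapper_fn)

transport.wrapped(stub)({})
transport.wrapped(stub)({})

assert wrapper_fn.call_count == 1  # the second call reused the cached wrapper
assert stub.call_count == 2        # while the underlying stub method ran both times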
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2734,26 +2740,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3060,8 +3068,9 @@ def test_restore_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_instance(request) @@ -3115,26 +3124,28 @@ async def test_restore_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_instance - ] = mock_object + ] = mock_rpc request = {} await client.restore_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3345,8 +3356,9 @@ def test_revert_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.revert_instance(request) @@ -3400,26 +3412,28 @@ async def test_revert_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revert_instance - ] = mock_object + ] = mock_rpc request = {} await client.revert_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.revert_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3628,8 +3642,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3683,26 +3698,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4055,22 +4072,23 @@ async def test_list_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4623,22 +4641,23 @@ async def test_get_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4940,8 +4959,9 @@ def test_create_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_snapshot(request) @@ -4995,26 +5015,28 @@ async def test_create_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5325,8 +5347,9 @@ def test_delete_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_snapshot(request) @@ -5380,26 +5403,28 @@ async def test_delete_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5686,8 +5711,9 @@ def test_update_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_snapshot(request) @@ -5741,26 +5767,28 @@ async def test_update_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6126,22 +6154,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6717,22 +6746,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7050,8 +7080,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -7105,26 +7136,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7435,8 +7468,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -7490,26 +7524,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7796,8 +7832,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -7851,26 +7888,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py index 697d69657cea..b0755c816a88 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -220,10 +219,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GDCHardwareManagementClient).get_transport_class, - type(GDCHardwareManagementClient), - ) + get_transport_class = GDCHardwareManagementClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py index eb97dedfc5f4..44e392dffb0f 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py @@ -838,7 +838,7 @@ def __init__( Type[GDCHardwareManagementTransport], Callable[..., GDCHardwareManagementTransport], ] = ( - type(self).get_transport_class(transport) + GDCHardwareManagementClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GDCHardwareManagementTransport], transport) ) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py index 2c0bf0948648..54d902b9ae68 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py @@ -301,12 +301,12 @@ class Site(proto.Message): Optional. Any additional notes for this Site. 
Please include information about: - - security or access restrictions - - any regulations affecting the technicians + - security or access restrictions + - any regulations affecting the technicians visiting the site - - any special process or approval required to + - any special process or approval required to move the equipment - - whether a representative will be available + - whether a representative will be available during site visits """ diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json index 1c827251228e..9716952b0d31 100644 --- a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gdchardwaremanagement", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py index dce36c2ed34b..72d9a360aa19 100644 --- a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py @@ -1379,22 +1379,23 @@ async def test_list_orders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_orders - ] = mock_object + ] = mock_rpc request = {} await client.list_orders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_orders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1961,22 +1962,23 @@ async def test_get_order_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_order - ] = mock_object + ] = mock_rpc request = {} await client.get_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2285,8 +2287,9 @@ def test_create_order_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_order(request) @@ -2340,26 +2343,28 @@ async def test_create_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_order - ] = mock_object + ] = mock_rpc request = {} await client.create_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2669,8 +2674,9 @@ def test_update_order_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_order(request) @@ -2724,26 +2730,28 @@ async def test_update_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_order - ] = mock_object + ] = mock_rpc request = {} await client.update_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3045,8 +3053,9 @@ def test_delete_order_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_order(request) @@ -3100,26 +3109,28 @@ async def test_delete_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_order - ] = mock_object + ] = mock_rpc request = {} await client.delete_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3411,8 +3422,9 @@ def test_submit_order_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.submit_order(request) @@ -3466,26 +3478,28 @@ async def test_submit_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_order - ] = mock_object + ] = mock_rpc request = {} await client.submit_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.submit_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3838,22 +3852,23 @@ async def test_list_sites_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sites - ] = mock_object + ] = mock_rpc request = {} await client.list_sites(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4408,22 +4423,23 @@ async def test_get_site_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_site - ] = mock_object + ] = mock_rpc request = {} await client.get_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4724,8 +4740,9 @@ def test_create_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_site(request) @@ -4779,26 +4796,28 @@ async def test_create_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_site - ] = mock_object + ] = mock_rpc request = {} await client.create_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5108,8 +5127,9 @@ def test_update_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_site(request) @@ -5163,26 +5183,28 @@ async def test_update_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_site - ] = mock_object + ] = mock_rpc request = {} await client.update_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5559,22 +5581,23 @@ async def test_list_hardware_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hardware_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_hardware_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hardware_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6161,22 +6184,23 @@ async def test_get_hardware_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_hardware_group - ] = mock_object + ] = mock_rpc request = {} await client.get_hardware_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6502,8 +6526,9 @@ def test_create_hardware_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_hardware_group(request) @@ -6559,26 +6584,28 @@ async def test_create_hardware_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_hardware_group - ] = mock_object + ] = mock_rpc request = {} await client.create_hardware_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6909,8 +6936,9 @@ def test_update_hardware_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_hardware_group(request) @@ -6966,26 +6994,28 @@ async def test_update_hardware_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_hardware_group - ] = mock_object + ] = mock_rpc request = {} await client.update_hardware_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7308,8 +7338,9 @@ def test_delete_hardware_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_hardware_group(request) @@ -7365,26 +7396,28 @@ async def test_delete_hardware_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_hardware_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_hardware_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7749,22 +7782,23 @@ async def test_list_hardware_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hardware - ] = mock_object + ] = mock_rpc request = {} await client.list_hardware(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8330,22 +8364,23 @@ async def test_get_hardware_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_hardware - ] = mock_object + ] = mock_rpc request = {} await client.get_hardware(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8650,8 +8685,9 @@ def test_create_hardware_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_hardware(request) @@ -8705,26 +8741,28 @@ async def test_create_hardware_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_hardware - ] = mock_object + ] = mock_rpc request = {} await client.create_hardware(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9034,8 +9072,9 @@ def test_update_hardware_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_hardware(request) @@ -9089,26 +9128,28 @@ async def test_update_hardware_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_hardware - ] = mock_object + ] = mock_rpc request = {} await client.update_hardware(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9408,8 +9449,9 @@ def test_delete_hardware_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_hardware(request) @@ -9463,26 +9505,28 @@ async def test_delete_hardware_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_hardware - ] = mock_object + ] = mock_rpc request = {} await client.delete_hardware(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9837,22 +9881,23 @@ async def test_list_comments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_comments - ] = mock_object + ] = mock_rpc request = {} await client.list_comments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_comments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10403,22 +10448,23 @@ async def test_get_comment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_comment - ] = mock_object + ] = mock_rpc request = {} await client.get_comment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10715,8 +10761,9 @@ def test_create_comment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_comment(request) @@ -10770,26 +10817,28 @@ async def test_create_comment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_comment - ] = mock_object + ] = mock_rpc request = {} await client.create_comment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11177,22 +11226,23 @@ async def test_list_change_log_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_change_log_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_change_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11770,22 +11820,23 @@ async def test_get_change_log_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_change_log_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_change_log_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12153,22 +12204,23 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_skus(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12729,22 +12781,23 @@ async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sku - ] = mock_object + ] = mock_rpc request = {} await client.get_sku(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sku(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13108,22 +13161,23 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_zones - ] = mock_object + ] = mock_rpc request = {} await client.list_zones(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13678,22 +13732,23 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_zone - ] = mock_object + ] = mock_rpc request = {} await client.get_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13992,8 +14047,9 @@ def test_create_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_zone(request) @@ -14047,26 +14103,28 @@ async def test_create_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_zone - ] = mock_object + ] = mock_rpc request = {} await client.create_zone(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14372,8 +14430,9 @@ def test_update_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_zone(request) @@ -14427,26 +14486,28 @@ async def test_update_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_zone - ] = mock_object + ] = mock_rpc request = {} await client.update_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14746,8 +14807,9 @@ def test_delete_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_zone(request) @@ -14801,26 +14863,28 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_zone - ] = mock_object + ] = mock_rpc request = {} await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15118,8 +15182,9 @@ def test_signal_zone_state_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.signal_zone_state(request) @@ -15175,26 +15240,28 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.signal_zone_state - ] = mock_object + ] = mock_rpc request = {} await client.signal_zone_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.signal_zone_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/async_client.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/async_client.py index dce7b21caeff..d5e64a868191 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/async_client.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -221,9 +220,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BackupForGKEClient).get_transport_class, type(BackupForGKEClient) - ) + get_transport_class = BackupForGKEClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py index 130ad5a4e31e..294f3e9ce29a 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py @@ -847,7 +847,7 @@ def __init__( transport_init: Union[ Type[BackupForGKETransport], Callable[..., BackupForGKETransport] ] = ( - type(self).get_transport_class(transport) + BackupForGKEClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BackupForGKETransport], transport) ) diff --git a/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json b/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json index a47a4fb2e00f..76d218ee10ad 100644 --- a/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json +++ b/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-backup", - "version": "0.5.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py b/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py index a564ab37fc8b..89b95504dbc7 100644 --- a/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py +++ b/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py @@ -1244,8 +1244,9 @@ def test_create_backup_plan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup_plan(request) @@ -1301,26 +1302,28 @@ async def test_create_backup_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup_plan - ] = mock_object + ] = mock_rpc request = {} await client.create_backup_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1715,22 +1718,23 @@ async def test_list_backup_plans_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backup_plans - ] = mock_object + ] = mock_rpc request = {} await client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backup_plans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2323,22 +2327,23 @@ async def test_get_backup_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup_plan - ] = mock_object + ] = mock_rpc request = {} await client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2657,8 +2662,9 @@ def test_update_backup_plan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup_plan(request) @@ -2714,26 +2720,28 @@ async def test_update_backup_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup_plan - ] = mock_object + ] = mock_rpc request = {} await client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3055,8 +3063,9 @@ def test_delete_backup_plan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup_plan(request) @@ -3112,26 +3121,28 @@ async def test_delete_backup_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup_plan - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3433,8 +3444,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -3488,26 +3500,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3879,22 +3893,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4484,22 +4499,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4816,8 +4832,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -4871,26 +4888,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5192,8 +5211,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -5247,26 +5267,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5630,22 +5652,23 @@ async def test_list_volume_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_volume_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_volume_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_volume_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6242,22 +6265,23 @@ async def test_get_volume_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_volume_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_volume_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_volume_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6586,8 +6610,9 @@ def test_create_restore_plan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_restore_plan(request) @@ -6643,26 +6668,28 @@ async def test_create_restore_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_restore_plan - ] = mock_object + ] = mock_rpc request = {} await client.create_restore_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_restore_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7059,22 +7086,23 @@ async def test_list_restore_plans_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_restore_plans - ] = mock_object + ] = mock_rpc request = {} await client.list_restore_plans(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_restore_plans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7660,22 +7688,23 @@ async def test_get_restore_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_restore_plan - ] = mock_object + ] = mock_rpc request = {} await client.get_restore_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_restore_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7988,8 +8017,9 @@ def test_update_restore_plan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_restore_plan(request) @@ -8045,26 +8075,28 @@ async def test_update_restore_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_restore_plan - ] = mock_object + ] = mock_rpc request = {} await client.update_restore_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_restore_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8386,8 +8418,9 @@ def test_delete_restore_plan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_restore_plan(request) @@ -8443,26 +8476,28 @@ async def test_delete_restore_plan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_restore_plan - ] = mock_object + ] = mock_rpc request = {} await client.delete_restore_plan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_restore_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8764,8 +8799,9 @@ def test_create_restore_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_restore(request) @@ -8819,26 +8855,28 @@ async def test_create_restore_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_restore - ] = mock_object + ] = mock_rpc request = {} await client.create_restore(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_restore(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9213,22 +9251,23 @@ async def test_list_restores_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_restores - ] = mock_object + ] = mock_rpc request = {} await client.list_restores(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_restores(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9806,22 +9845,23 @@ async def test_get_restore_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_restore - ] = mock_object + ] = mock_rpc request = {} await client.get_restore(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_restore(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10128,8 +10168,9 @@ def test_update_restore_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_restore(request) @@ -10183,26 +10224,28 @@ async def test_update_restore_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_restore - ] = mock_object + ] = mock_rpc request = {} await client.update_restore(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_restore(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10504,8 +10547,9 @@ def test_delete_restore_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_restore(request) @@ -10559,26 +10603,28 @@ async def test_delete_restore_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_restore - ] = mock_object + ] = mock_rpc request = {} await client.delete_restore(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_restore(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10942,22 +10988,23 @@ async def test_list_volume_restores_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_volume_restores - ] = mock_object + ] = mock_rpc request = {} await client.list_volume_restores(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_volume_restores(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11551,22 +11598,23 @@ async def test_get_volume_restore_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_volume_restore - ] = mock_object + ] = mock_rpc request = {} await client.get_volume_restore(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_volume_restore(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11952,22 +12000,23 @@ async def test_get_backup_index_download_url_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup_index_download_url - ] = mock_object + ] = mock_rpc request = {} await client.get_backup_index_download_url(https://github.com/request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup_index_download_url(https://github.com/request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py index 0b9ea1630113..558c8aab67c5 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.8.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py index 0b9ea1630113..558c8aab67c5 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.8.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py index 3a10500d0eac..067ac7ab5cc4 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GatewayServiceClient).get_transport_class, type(GatewayServiceClient) - ) + get_transport_class = GatewayServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py index 3a0d84f49a71..bc179d1d0508 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[GatewayServiceTransport], Callable[..., GatewayServiceTransport] ] = ( - type(self).get_transport_class(transport) + GatewayServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GatewayServiceTransport], transport) ) diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json index 6488d9967d62..6586a7512b44 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-connect-gateway", - "version": "0.8.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py index e25c6ba045b6..97430148490d 100644 --- a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py @@ -1261,22 +1261,23 @@ async def test_get_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_resource - ] = mock_object + ] = mock_rpc request = {} await client.get_resource(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1507,22 +1508,23 @@ async def test_post_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.post_resource - ] = mock_object + ] = mock_rpc request = {} await client.post_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.post_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1753,22 +1755,23 @@ async def test_delete_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_resource - ] = mock_object + ] = mock_rpc request = {} await client.delete_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1999,22 +2002,23 @@ async def test_put_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.put_resource - ] = mock_object + ] = mock_rpc request = {} await client.put_resource(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.put_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2245,22 +2249,23 @@ async def test_patch_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.patch_resource - ] = mock_object + ] = mock_rpc request = {} await client.patch_resource(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.patch_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py index d5efd02cf7f8..558c8aab67c5 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py index d5efd02cf7f8..558c8aab67c5 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py index d5efd02cf7f8..558c8aab67c5 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py index d5efd02cf7f8..558c8aab67c5 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/async_client.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/async_client.py index c861d644f27d..f3ec14566565 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/async_client.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GkeHubClient).get_transport_class, type(GkeHubClient) - ) + get_transport_class = GkeHubClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py index 8e1f049cb4b0..733b0972e4ac 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py @@ -702,7 +702,7 @@ def __init__( transport_init: Union[ Type[GkeHubTransport], Callable[..., GkeHubTransport] ] = ( - type(self).get_transport_class(transport) + GkeHubClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GkeHubTransport], transport) ) diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py index d5efd02cf7f8..558c8aab67c5 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/async_client.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/async_client.py index 6bcaac476c8c..33d73001e760 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/async_client.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,10 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GkeHubMembershipServiceClient).get_transport_class, - type(GkeHubMembershipServiceClient), - ) + get_transport_class = GkeHubMembershipServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py index 78a5e1a3cd85..c84ff7e7a10b 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py @@ -692,7 +692,7 @@ def __init__( Type[GkeHubMembershipServiceTransport], Callable[..., GkeHubMembershipServiceTransport], ] = ( - type(self).get_transport_class(transport) + GkeHubMembershipServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GkeHubMembershipServiceTransport], transport) ) diff --git a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json index 0854c58fc225..f5d7aaa2ae7a 100644 --- a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json +++ b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-hub", - "version": "1.14.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json index 017216380c2d..5f7f541e37e3 100644 --- a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json +++ b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-hub", - "version": "1.14.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py index d2bf7688b4ec..78fa3ce63464 100644 --- a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py +++ b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py @@ -1237,22 +1237,23 @@ async def test_list_memberships_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_memberships - ] = mock_object + ] = mock_rpc request = {} await client.list_memberships(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_memberships(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1803,22 +1804,23 @@ async def test_list_features_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_features - ] = mock_object + ] = mock_rpc request = {} await client.list_features(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_features(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2370,22 +2372,23 @@ async def test_get_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_membership - ] = mock_object + ] = mock_rpc request = {} await client.get_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2740,22 +2743,23 @@ async def test_get_feature_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_feature - ] = mock_object + ] = mock_rpc request = {} await client.get_feature(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_feature(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3056,8 +3060,9 @@ def test_create_membership_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_membership(request) @@ -3113,26 +3118,28 @@ async def test_create_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_membership - ] = mock_object + ] = mock_rpc request = {} await client.create_membership(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3488,8 +3495,9 @@ def test_create_feature_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_feature(request) @@ -3543,26 +3551,28 @@ async def test_create_feature_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_feature - ] = mock_object + ] = mock_rpc request = {} await client.create_feature(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_feature(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3882,8 +3892,9 @@ def test_delete_membership_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_membership(request) @@ -3939,26 +3950,28 @@ async def test_delete_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_membership - ] = mock_object + ] = mock_rpc request = {} await client.delete_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4260,8 +4273,9 @@ def test_delete_feature_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_feature(request) @@ -4315,26 +4329,28 @@ async def test_delete_feature_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_feature - ] = mock_object + ] = mock_rpc request = {} await client.delete_feature(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_feature(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4634,8 +4650,9 @@ def test_update_membership_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_membership(request) @@ -4691,26 +4708,28 @@ async def test_update_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_membership - ] = mock_object + ] = mock_rpc request = {} await client.update_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5064,8 +5083,9 @@ def test_update_feature_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_feature(request) @@ -5119,26 +5139,28 @@ async def test_update_feature_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_feature - ] = mock_object + ] = mock_rpc request = {} await client.update_feature(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_feature(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5518,22 +5540,23 @@ async def test_generate_connect_manifest_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_connect_manifest - ] = mock_object + ] = mock_rpc request = {} await client.generate_connect_manifest(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_connect_manifest(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py index dad3d991e27a..01ecff03b32d 100644 --- a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py +++ b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py @@ -1382,22 +1382,23 @@ async def test_list_memberships_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_memberships - ] = mock_object + ] = mock_rpc request = {} await client.list_memberships(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_memberships(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1956,22 +1957,23 @@ async def test_get_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_membership - ] = mock_object + ] = mock_rpc request = {} await client.get_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2286,8 +2288,9 @@ def test_create_membership_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_membership(request) @@ -2343,26 +2346,28 @@ async def test_create_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_membership - ] = mock_object + ] = mock_rpc request = {} await client.create_membership(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2692,8 +2697,9 @@ def test_delete_membership_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_membership(request) @@ -2749,26 +2755,28 @@ async def test_delete_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_membership - ] = mock_object + ] = mock_rpc request = {} await client.delete_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3078,8 +3086,9 @@ def test_update_membership_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_membership(request) @@ -3135,26 +3144,28 @@ async def test_update_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_membership - ] = mock_object + ] = mock_rpc request = {} await client.update_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3542,22 +3553,23 @@ async def test_generate_connect_manifest_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_connect_manifest - ] = mock_object + ] = mock_rpc request = {} await client.generate_connect_manifest(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_connect_manifest(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3839,22 +3851,23 @@ async def test_validate_exclusivity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_exclusivity - ] = mock_object + ] = mock_rpc request = {} await client.validate_exclusivity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_exclusivity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4144,22 +4157,23 @@ async def test_generate_exclusivity_manifest_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_exclusivity_manifest - ] = mock_object + ] = mock_rpc request = {} await client.generate_exclusivity_manifest(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_exclusivity_manifest(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py index 44e5c049e336..558c8aab67c5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
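Note: the gapic_version.py hunk that continues below resets the stamped version to the 0.0.0 placeholder while keeping the {x-release-please-version} marker that release tooling rewrites at release time. A purely illustrative sketch of that kind of substitution (this is not the actual release-please implementation, and 0.6.13 is an arbitrary example value):

import re

line = '__version__ = "0.0.0"  # {x-release-please-version}'
# Swap in a concrete version while preserving the marker comment.
stamped = re.sub(
    r'"(?:\d+\.){2}\d+"(\s*#\s*\{x-release-please-version\})',
    r'"0.6.13"\1',
    line,
)
print(stamped)  # __version__ = "0.6.13"  # {x-release-please-version}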
# -__version__ = "0.6.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py index 44e5c049e336..558c8aab67c5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py index 91b52e6b4349..47e8caee062a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AttachedClustersClient).get_transport_class, type(AttachedClustersClient) - ) + get_transport_class = AttachedClustersClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py index 4509b48910dc..a380b95dfcf7 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py @@ -700,7 +700,7 @@ def __init__( Type[AttachedClustersTransport], Callable[..., AttachedClustersTransport], ] = ( - type(self).get_transport_class(transport) + AttachedClustersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AttachedClustersTransport], transport) ) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py index 1c0aae397254..8c6e9aabe5f8 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
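Note: the async_client.py hunks here drop the functools.partial indirection in favor of referencing the client class's get_transport_class directly (the client.py hunks make the matching change from type(self) to the explicit class). A small, hypothetical demonstration of why the two spellings behave the same when the method and its registry live on a client metaclass, as the generated clients appear to do; every name below is illustrative:

import functools

class FakeClientMeta(type):
    # Stand-in for the generated client metaclass: a transport registry
    # plus a lookup method defined on the metaclass.
    _transport_registry = {"grpc": "FakeGrpcTransport", "rest": "FakeRestTransport"}

    def get_transport_class(cls, label=None):
        return cls._transport_registry[label or "grpc"]

class FakeClient(metaclass=FakeClientMeta):
    pass

old_style = functools.partial(
    type(FakeClient).get_transport_class, type(FakeClient)
)
new_style = FakeClient.get_transport_class

# Both spellings resolve to the same registry lookup.
assert old_style("rest") == new_style("rest") == "FakeRestTransport"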
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AwsClustersClient).get_transport_class, type(AwsClustersClient) - ) + get_transport_class = AwsClustersClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py index 7def99b4be4c..1384fc78d2e8 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py @@ -718,7 +718,7 @@ def __init__( transport_init: Union[ Type[AwsClustersTransport], Callable[..., AwsClustersTransport] ] = ( - type(self).get_transport_class(transport) + AwsClustersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AwsClustersTransport], transport) ) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py index 868dfc8ef223..f046c5526f4a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AzureClustersClient).get_transport_class, type(AzureClustersClient) - ) + get_transport_class = AzureClustersClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py index 4cc2fece647a..d7016b66df5d 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py @@ -741,7 +741,7 @@ def __init__( transport_init: Union[ Type[AzureClustersTransport], Callable[..., AzureClustersTransport] ] = ( - type(self).get_transport_class(transport) + AzureClustersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AzureClustersTransport], transport) ) diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json index c825c6afff93..08dd05577dce 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": 
"google-cloud-gke-multicloud", - "version": "0.6.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py index cd95340fef75..a438c01e8714 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py @@ -1285,8 +1285,9 @@ def test_create_attached_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_attached_cluster(request) @@ -1342,26 +1343,28 @@ async def test_create_attached_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_attached_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_attached_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_attached_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1689,8 +1692,9 @@ def test_update_attached_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_attached_cluster(request) @@ -1746,26 +1750,28 @@ async def test_update_attached_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attached_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_attached_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_attached_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2093,8 +2099,9 @@ def test_import_attached_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_attached_cluster(request) @@ -2150,26 +2157,28 @@ async def test_import_attached_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_attached_cluster - ] = mock_object + ] = mock_rpc request = {} await client.import_attached_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_attached_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2575,22 +2584,23 @@ async def test_get_attached_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attached_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_attached_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attached_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,22 +2993,23 @@ async def test_list_attached_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attached_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_attached_clusters(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attached_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3517,8 +3528,9 @@ def test_delete_attached_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_attached_cluster(request) @@ -3574,26 +3586,28 @@ async def test_delete_attached_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_attached_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_attached_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_attached_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3963,22 +3977,23 @@ async def test_get_attached_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attached_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_attached_server_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attached_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4367,22 +4382,23 @@ async def test_generate_attached_cluster_install_manifest_async_use_cached_wrapp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_attached_cluster_install_manifest - ] = mock_object + ] = mock_rpc request = {} await client.generate_attached_cluster_install_manifest(request) # Establish that the underlying gRPC stub method was called. 
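Note: the reworded comment in the *_use_cached_wrapped_rpc hunks describes operation (LRO) methods building a cached client._transport.operations_client via wrapper_fn on the first call and reusing it afterwards. A rough, hypothetical sketch of that lazy-caching shape; the class and attribute names are illustrative, not the real transport:

class FakeTransport:
    def __init__(self):
        self._operations_client = None
        self.build_count = 0  # how many times the client has been built

    @property
    def operations_client(self):
        # Built once, on first access, then reused by later LRO calls.
        if self._operations_client is None:
            self.build_count += 1
            self._operations_client = object()
        return self._operations_client

transport = FakeTransport()
first = transport.operations_client
second = transport.operations_client
assert first is second
assert transport.build_count == 1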
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_attached_cluster_install_manifest(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4797,22 +4813,23 @@ async def test_generate_attached_cluster_agent_token_async_use_cached_wrapped_rp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_attached_cluster_agent_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_attached_cluster_agent_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_attached_cluster_agent_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py index c9b0c25fe241..87fa3f604e12 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py @@ -1217,8 +1217,9 @@ def test_create_aws_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_aws_cluster(request) @@ -1274,26 +1275,28 @@ async def test_create_aws_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_aws_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_aws_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_aws_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1619,8 +1622,9 @@ def test_update_aws_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_aws_cluster(request) @@ -1676,26 +1680,28 @@ async def test_update_aws_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_aws_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_aws_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_aws_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2085,22 +2091,23 @@ async def test_get_aws_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_aws_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_aws_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_aws_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2477,22 +2484,23 @@ async def test_list_aws_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_aws_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_aws_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_aws_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3009,8 +3017,9 @@ def test_delete_aws_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_aws_cluster(request) @@ -3066,26 +3075,28 @@ async def test_delete_aws_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_aws_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_aws_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_aws_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3478,22 +3489,23 @@ async def test_generate_aws_cluster_agent_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_aws_cluster_agent_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_aws_cluster_agent_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_aws_cluster_agent_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3784,22 +3796,23 @@ async def test_generate_aws_access_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_aws_access_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_aws_access_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_aws_access_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4029,8 +4042,9 @@ def test_create_aws_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_aws_node_pool(request) @@ -4086,26 +4100,28 @@ async def test_create_aws_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_aws_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_aws_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4431,8 +4447,9 @@ def test_update_aws_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_aws_node_pool(request) @@ -4488,26 +4505,28 @@ async def test_update_aws_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_aws_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_aws_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4828,8 +4847,9 @@ def test_rollback_aws_node_pool_update_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rollback_aws_node_pool_update(request) @@ -4885,26 +4905,28 @@ async def test_rollback_aws_node_pool_update_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_aws_node_pool_update - ] = mock_object + ] = mock_rpc request = {} await client.rollback_aws_node_pool_update(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rollback_aws_node_pool_update(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5289,22 +5311,23 @@ async def test_get_aws_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_aws_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_aws_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5689,22 +5712,23 @@ async def test_list_aws_node_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_aws_node_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_aws_node_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_aws_node_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6221,8 +6245,9 @@ def test_delete_aws_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_aws_node_pool(request) @@ -6278,26 +6303,28 @@ async def test_delete_aws_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_aws_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_aws_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6690,22 +6717,23 @@ async def test_get_aws_open_id_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_aws_open_id_config - ] = mock_object + ] = mock_rpc request = {} await client.get_aws_open_id_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_aws_open_id_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7002,22 +7030,23 @@ async def test_get_aws_json_web_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_aws_json_web_keys - ] = mock_object + ] = mock_rpc request = {} await client.get_aws_json_web_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_aws_json_web_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7303,22 +7332,23 @@ async def test_get_aws_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_aws_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_aws_server_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_aws_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py index 85c2f0eb675e..fc5333b22ff8 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py @@ -1250,8 +1250,9 @@ def test_create_azure_client_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_azure_client(request) @@ -1307,26 +1308,28 @@ async def test_create_azure_client_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_azure_client - ] = mock_object + ] = mock_rpc request = {} await client.create_azure_client(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_azure_client(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1719,22 +1722,23 @@ async def test_get_azure_client_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_azure_client - ] = mock_object + ] = mock_rpc request = {} await client.get_azure_client(request) # Establish that the underlying gRPC stub method was called. 
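Note: the assertion flow shared by these cached-wrapper tests is: patch the wrapper factory with a spy, make one call (which builds and caches the wrapped RPC), reset the spy, call again, then check that the factory was not invoked again while the wrapped RPC recorded both calls. A compact, self-contained sketch of that flow with a toy cache; make_client, wrapper_fn, and the cache dict are illustrative stand-ins, not the generated client:

from unittest import mock

def make_client(wrap_method):
    # Toy stand-in for the generated client: wrap the raw RPC once, cache
    # it, and reuse the cached wrapper on later calls.
    cache = {}

    def call(request):
        if "rpc" not in cache:
            cache["rpc"] = wrap_method(mock.Mock(name="raw_rpc"))
        return cache["rpc"](request)

    return call, cache

wrapper_fn = mock.Mock(side_effect=lambda rpc: mock.Mock(name="wrapped_rpc"))
client_call, cache = make_client(wrapper_fn)

client_call({})  # first call builds and caches the wrapper
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
client_call({})  # second call reuses the cached wrapper
assert wrapper_fn.call_count == 0
assert cache["rpc"].call_count == 2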
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_azure_client(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2107,22 +2111,23 @@ async def test_list_azure_clients_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_azure_clients - ] = mock_object + ] = mock_rpc request = {} await client.list_azure_clients(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_azure_clients(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2637,8 +2642,9 @@ def test_delete_azure_client_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_azure_client(request) @@ -2694,26 +2700,28 @@ async def test_delete_azure_client_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_azure_client - ] = mock_object + ] = mock_rpc request = {} await client.delete_azure_client(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_azure_client(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3025,8 +3033,9 @@ def test_create_azure_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_azure_cluster(request) @@ -3082,26 +3091,28 @@ async def test_create_azure_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_azure_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_azure_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_azure_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3428,8 +3439,9 @@ def test_update_azure_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_azure_cluster(request) @@ -3485,26 +3497,28 @@ async def test_update_azure_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_azure_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_azure_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_azure_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3911,22 +3925,23 @@ async def test_get_azure_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_azure_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_azure_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_azure_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4319,22 +4334,23 @@ async def test_list_azure_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_azure_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_azure_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_azure_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4851,8 +4867,9 @@ def test_delete_azure_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_azure_cluster(request) @@ -4908,26 +4925,28 @@ async def test_delete_azure_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_azure_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_azure_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_azure_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5321,22 +5340,23 @@ async def test_generate_azure_cluster_agent_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_azure_cluster_agent_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_azure_cluster_agent_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_azure_cluster_agent_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5627,22 +5647,23 @@ async def test_generate_azure_access_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_azure_access_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_azure_access_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_azure_access_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5873,8 +5894,9 @@ def test_create_azure_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_azure_node_pool(request) @@ -5930,26 +5952,28 @@ async def test_create_azure_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_azure_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_azure_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_azure_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6277,8 +6301,9 @@ def test_update_azure_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_azure_node_pool(request) @@ -6334,26 +6359,28 @@ async def test_update_azure_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_azure_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_azure_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_azure_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6753,22 +6780,23 @@ async def test_get_azure_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_azure_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_azure_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_azure_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7156,22 +7184,23 @@ async def test_list_azure_node_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_azure_node_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_azure_node_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_azure_node_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7690,8 +7719,9 @@ def test_delete_azure_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_azure_node_pool(request) @@ -7747,26 +7777,28 @@ async def test_delete_azure_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_azure_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_azure_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_azure_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8160,22 +8192,23 @@ async def test_get_azure_open_id_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_azure_open_id_config - ] = mock_object + ] = mock_rpc request = {} await client.get_azure_open_id_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_azure_open_id_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8559,22 +8592,23 @@ async def test_get_azure_json_web_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_azure_json_web_keys - ] = mock_object + ] = mock_rpc request = {} await client.get_azure_json_web_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_azure_json_web_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8947,22 +8981,23 @@ async def test_get_azure_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_azure_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_azure_server_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_azure_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py index 087b09e14389..f67a82911280 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -226,9 +225,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GSuiteAddOnsClient).get_transport_class, type(GSuiteAddOnsClient) - ) + get_transport_class = GSuiteAddOnsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py index 981c0ec66e2c..fc417f4a2402 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py @@ -724,7 +724,7 @@ def __init__( transport_init: Union[ Type[GSuiteAddOnsTransport], Callable[..., GSuiteAddOnsTransport] ] = ( - type(self).get_transport_class(transport) + GSuiteAddOnsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GSuiteAddOnsTransport], transport) ) diff --git a/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json b/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json index d89c132e9784..1a4e67e71b2d 100644 --- a/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json +++ b/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gsuiteaddons", - "version": "0.3.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py index e6f3e6b8cd7d..fef59387cd74 100644 --- a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py +++ b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py @@ -1287,22 +1287,23 @@ async def test_get_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_authorization - ] = mock_object + ] = mock_rpc request = {} await client.get_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1683,22 +1684,23 @@ async def test_create_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2095,22 +2097,23 @@ async def test_replace_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_deployment - ] = mock_object + ] = mock_rpc request = {} await client.replace_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2479,22 +2482,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2851,22 +2855,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3414,22 +3419,23 @@ async def test_delete_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_deployment - ] = mock_object + ] = mock_rpc request = {} await client.delete_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3784,22 +3790,23 @@ async def test_install_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.install_deployment - ] = mock_object + ] = mock_rpc request = {} await client.install_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.install_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4154,22 +4161,23 @@ async def test_uninstall_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.uninstall_deployment - ] = mock_object + ] = mock_rpc request = {} await client.uninstall_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.uninstall_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4532,22 +4540,23 @@ async def test_get_install_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_install_status - ] = mock_object + ] = mock_rpc request = {} await client.get_install_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_install_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py b/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py index 5dea85083756..558c8aab67c5 100644 --- a/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py +++ b/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.3.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py b/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py index 5dea85083756..558c8aab67c5 100644 --- a/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py +++ b/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.3.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam/gapic_version.py index 1d4d52716bc4..558c8aab67c5 100644 --- a/packages/google-cloud-iam/google/cloud/iam/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py index 1d4d52716bc4..558c8aab67c5 100644 --- a/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py index 1d4d52716bc4..558c8aab67c5 100644 --- a/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/async_client.py b/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/async_client.py index 0c1d544ab95f..bf4f24718f22 100644 --- a/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/async_client.py +++ b/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,9 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IAMClient).get_transport_class, type(IAMClient) - ) + get_transport_class = IAMClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py b/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py index 10326835378e..b48544b0849b 100644 --- a/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py @@ -705,7 +705,7 @@ def __init__( ) transport_init: Union[Type[IAMTransport], Callable[..., IAMTransport]] = ( - type(self).get_transport_class(transport) + IAMClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IAMTransport], transport) ) diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py index 1d4d52716bc4..558c8aab67c5 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py index 1d4d52716bc4..558c8aab67c5 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/async_client.py b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/async_client.py index 6ec8a977fd3a..2c5c7a9a8a8a 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/async_client.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IAMCredentialsClient).get_transport_class, type(IAMCredentialsClient) - ) + get_transport_class = IAMCredentialsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py index 7c0ff7b33aa5..953ced2b6266 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py @@ -670,7 +670,7 @@ def __init__( transport_init: Union[ Type[IAMCredentialsTransport], Callable[..., IAMCredentialsTransport] ] = ( - type(self).get_transport_class(transport) + IAMCredentialsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IAMCredentialsTransport], transport) ) diff --git a/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py index 1d4d52716bc4..558c8aab67c5 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/async_client.py b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/async_client.py index d643f882d7a6..e44ecb5e4bb1 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/async_client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PoliciesClient).get_transport_class, type(PoliciesClient) - ) + get_transport_class = PoliciesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py index e8989b18cd63..4081c4a51c27 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[PoliciesTransport], Callable[..., PoliciesTransport] ] = ( - type(self).get_transport_class(transport) + PoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PoliciesTransport], transport) ) diff --git a/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py index 1d4d52716bc4..558c8aab67c5 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/async_client.py b/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/async_client.py index a73d2fbb3c80..2a39795ae1bc 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/async_client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PoliciesClient).get_transport_class, type(PoliciesClient) - ) + get_transport_class = PoliciesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py b/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py index 51f810cae32f..27c7fef0ee7c 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[PoliciesTransport], Callable[..., PoliciesTransport] ] = ( - type(self).get_transport_class(transport) + PoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PoliciesTransport], transport) ) diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json index c85376c9af29..13547dc8e5e6 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.15.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json index 7b728af767bb..1e024f5ce6ce 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.15.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json index 73cf9290f66a..41cce8e24813 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.15.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json index 9f4fda2d13ff..5c06ffb6bc77 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.15.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py b/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py index 6ea9f11a0184..acc8ad8492f7 
100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py @@ -1182,22 +1182,23 @@ async def test_list_service_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_service_accounts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1794,22 +1795,23 @@ async def test_get_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_account - ] = mock_object + ] = mock_rpc request = {} await client.get_service_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2219,22 +2221,23 @@ async def test_create_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_account - ] = mock_object + ] = mock_rpc request = {} await client.create_service_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2674,22 +2677,23 @@ async def test_update_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service_account - ] = mock_object + ] = mock_rpc request = {} await client.update_service_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3009,22 +3013,23 @@ async def test_patch_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.patch_service_account - ] = mock_object + ] = mock_rpc request = {} await client.patch_service_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.patch_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3317,22 +3322,23 @@ async def test_delete_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service_account - ] = mock_object + ] = mock_rpc request = {} await client.delete_service_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3690,22 +3696,23 @@ async def test_undelete_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_service_account - ] = mock_object + ] = mock_rpc request = {} await client.undelete_service_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.undelete_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3981,22 +3988,23 @@ async def test_enable_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_service_account - ] = mock_object + ] = mock_rpc request = {} await client.enable_service_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enable_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4268,22 +4276,23 @@ async def test_disable_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_service_account - ] = mock_object + ] = mock_rpc request = {} await client.disable_service_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.disable_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4557,22 +4566,23 @@ async def test_list_service_account_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_account_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_service_account_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_account_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4974,22 +4984,23 @@ async def test_get_service_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_account_key - ] = mock_object + ] = mock_rpc request = {} await client.get_service_account_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5410,22 +5421,23 @@ async def test_create_service_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_account_key - ] = mock_object + ] = mock_rpc request = {} await client.create_service_account_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_service_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5856,22 +5868,23 @@ async def test_upload_service_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upload_service_account_key - ] = mock_object + ] = mock_rpc request = {} await client.upload_service_account_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.upload_service_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6166,22 +6179,23 @@ async def test_delete_service_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service_account_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_service_account_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_service_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6537,22 +6551,23 @@ async def test_disable_service_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_service_account_key - ] = mock_object + ] = mock_rpc request = {} await client.disable_service_account_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.disable_service_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6908,22 +6923,23 @@ async def test_enable_service_account_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_service_account_key - ] = mock_object + ] = mock_rpc request = {} await client.enable_service_account_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enable_service_account_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7274,22 +7290,23 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.sign_blob - ] = mock_object + ] = mock_rpc request = {} await client.sign_blob(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sign_blob(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7653,22 +7670,23 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.sign_jwt - ] = mock_object + ] = mock_rpc request = {} await client.sign_jwt(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sign_jwt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8028,22 +8046,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8410,22 +8429,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8802,22 +8822,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9221,22 +9242,23 @@ async def test_query_grantable_roles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_grantable_roles - ] = mock_object + ] = mock_rpc request = {} await client.query_grantable_roles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_grantable_roles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9729,22 +9751,23 @@ async def test_list_roles_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_roles - ] = mock_object + ] = mock_rpc request = {} await client.list_roles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_roles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10157,22 +10180,23 @@ async def test_get_role_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_role - ] = mock_object + ] = mock_rpc request = {} await client.get_role(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_role(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10469,22 +10493,23 @@ async def test_create_role_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_role - ] = mock_object + ] = mock_rpc request = {} await client.create_role(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_role(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10779,22 +10804,23 @@ async def test_update_role_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_role - ] = mock_object + ] = mock_rpc request = {} await client.update_role(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_role(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11089,22 +11115,23 @@ async def test_delete_role_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_role - ] = mock_object + ] = mock_rpc request = {} await client.delete_role(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_role(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11399,22 +11426,23 @@ async def test_undelete_role_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_role - ] = mock_object + ] = mock_rpc request = {} await client.undelete_role(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.undelete_role(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11706,22 +11734,23 @@ async def test_query_testable_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_testable_permissions - ] = mock_object + ] = mock_rpc request = {} await client.query_testable_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_testable_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12138,22 +12167,23 @@ async def test_query_auditable_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_auditable_services - ] = mock_object + ] = mock_rpc request = {} await client.query_auditable_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_auditable_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12353,22 +12383,23 @@ async def test_lint_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lint_policy - ] = mock_object + ] = mock_rpc request = {} await client.lint_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lint_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py b/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py index bbba65412e54..34da506b5ca6 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py @@ -1295,22 +1295,23 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_access_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_access_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_access_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1711,22 +1712,23 @@ async def test_generate_id_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_id_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_id_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_id_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2116,22 +2118,23 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.sign_blob - ] = mock_object + ] = mock_rpc request = {} await client.sign_blob(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sign_blob(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2505,22 +2508,23 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.sign_jwt - ] = mock_object + ] = mock_rpc request = {} await client.sign_jwt(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sign_jwt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py index 78cd23efe3da..8dbfeacdaa09 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py @@ -1241,22 +1241,23 @@ async def test_list_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1812,22 +1813,23 @@ async def test_get_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2128,8 +2130,9 @@ def test_create_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_policy(request) @@ -2183,26 +2186,28 @@ async def test_create_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2508,8 +2513,9 @@ def test_update_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_policy(request) @@ -2563,26 +2569,28 @@ async def test_update_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2792,8 +2800,9 @@ def test_delete_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_policy(request) @@ -2847,26 +2856,28 @@ async def test_delete_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py index 3f62dcda9ee3..67a86045c5d0 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py @@ -1218,22 +1218,23 @@ async def test_list_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_policies(request) # Establish that the underlying gRPC stub method was called. 
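The reworded comment in the long-running-operation tests (create_policy, update_policy, delete_policy here, and the endpoint and key-handle variants later in the diff) spells out what is being cached: the first RPC call runs wrapper_fn to build client._transport.operations_client, and later calls must reuse it, which is why the tests reset wrapper_fn and then assert wrapper_fn.call_count == 0. A hedged sketch of that caching shape, with made-up names::

    from unittest import mock

    class FakeTransport:
        """Stand-in transport that lazily builds an operations client, as the new comment describes."""

        def __init__(self, wrapper_fn):
            self._wrapper_fn = wrapper_fn
            self._operations_client = None

        @property
        def operations_client(self):
            if self._operations_client is None:
                # Built once on the first long-running call, then cached.
                self._operations_client = self._wrapper_fn()
            return self._operations_client

    wrapper_fn = mock.Mock(return_value=object())
    transport = FakeTransport(wrapper_fn)

    transport.operations_client  # first access builds the client
    wrapper_fn.reset_mock()
    transport.operations_client  # cached: no new wrapper is created
    assert wrapper_fn.call_count == 0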
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1786,22 +1787,23 @@ async def test_get_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2100,8 +2102,9 @@ def test_create_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_policy(request) @@ -2155,26 +2158,28 @@ async def test_create_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2480,8 +2485,9 @@ def test_update_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_policy(request) @@ -2535,26 +2541,28 @@ async def test_update_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2764,8 +2772,9 @@ def test_delete_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_policy(request) @@ -2819,26 +2828,28 @@ async def test_delete_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-iap/google/cloud/iap/gapic_version.py b/packages/google-cloud-iap/google/cloud/iap/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-iap/google/cloud/iap/gapic_version.py +++ b/packages/google-cloud-iap/google/cloud/iap/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/async_client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/async_client.py index 7bf4c4e6037a..c84d7a703e22 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/async_client.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -212,10 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IdentityAwareProxyAdminServiceClient).get_transport_class, - type(IdentityAwareProxyAdminServiceClient), - ) + get_transport_class = IdentityAwareProxyAdminServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py index 02fe3bad693d..91c97f265e7b 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[IdentityAwareProxyAdminServiceTransport], Callable[..., IdentityAwareProxyAdminServiceTransport], ] = ( - type(self).get_transport_class(transport) + IdentityAwareProxyAdminServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., IdentityAwareProxyAdminServiceTransport], transport diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/async_client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/async_client.py index 1c7e3d81f180..a8f48efae822 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/async_client.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,10 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IdentityAwareProxyOAuthServiceClient).get_transport_class, - type(IdentityAwareProxyOAuthServiceClient), - ) + get_transport_class = IdentityAwareProxyOAuthServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py index 4539a4feeeee..799bf3882e25 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py @@ -661,7 +661,7 @@ def __init__( Type[IdentityAwareProxyOAuthServiceTransport], Callable[..., IdentityAwareProxyOAuthServiceTransport], ] = ( - type(self).get_transport_class(transport) + IdentityAwareProxyOAuthServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., IdentityAwareProxyOAuthServiceTransport], transport diff --git a/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json b/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json index 199a2283520d..3170bc627f80 100644 --- a/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json +++ b/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iap", - "version": "1.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py index cc3aa370e4b2..f0e0512368a9 100644 --- a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py @@ -1388,22 +1388,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1691,22 +1692,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
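The async_client.py hunks drop the functools import because the functools.partial(type(FooClient).get_transport_class, type(FooClient)) construction is replaced by a plain reference to the already-bound method, FooClient.get_transport_class. Both spellings end up calling the same function defined on the generated client's metaclass; the new one is simply the direct form. A simplified, hypothetical model of that pattern (the real ClientMeta also keeps a transport registry, omitted here)::

    import functools

    class _WidgetClientMeta(type):
        # Hypothetical stand-in for a generated ClientMeta.
        def get_transport_class(cls, label=None):
            return f"{label or 'grpc'} transport"

    class WidgetServiceClient(metaclass=_WidgetClientMeta):
        pass

    # Old generated form: fetch the function off the metaclass and re-bind it by hand.
    old_style = functools.partial(
        type(WidgetServiceClient).get_transport_class, type(WidgetServiceClient)
    )

    # New generated form: the attribute lookup already yields a bound method.
    new_style = WidgetServiceClient.get_transport_class

    assert old_style("grpc_asyncio") == new_style("grpc_asyncio")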
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2002,22 +2004,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2312,22 +2315,23 @@ async def test_get_iap_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iap_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_iap_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iap_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2600,22 +2604,23 @@ async def test_update_iap_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_iap_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_iap_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_iap_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2901,22 +2906,23 @@ async def test_list_tunnel_dest_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tunnel_dest_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_tunnel_dest_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tunnel_dest_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3498,22 +3504,23 @@ async def test_create_tunnel_dest_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tunnel_dest_group - ] = mock_object + ] = mock_rpc request = {} await client.create_tunnel_dest_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tunnel_dest_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3915,22 +3922,23 @@ async def test_get_tunnel_dest_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tunnel_dest_group - ] = mock_object + ] = mock_rpc request = {} await client.get_tunnel_dest_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tunnel_dest_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4299,22 +4307,23 @@ async def test_delete_tunnel_dest_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tunnel_dest_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_tunnel_dest_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tunnel_dest_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4679,22 +4688,23 @@ async def test_update_tunnel_dest_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tunnel_dest_group - ] = mock_object + ] = mock_rpc request = {} await client.update_tunnel_dest_group(request) # Establish that the underlying gRPC stub method was called. 
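These *_use_cached_wrapped_rpc tests all share one shape: swap the entry in the transport's _wrapped_methods cache for a mock, call the client method twice, and check that both calls went through that same cached wrapper (call_count reaches 2) without wrapper_fn being invoked again. A stripped-down, hypothetical version of that shape for a synchronous client::

    from unittest import mock

    class _Transport:
        def __init__(self):
            # Hypothetical cache mirroring _wrapped_methods in the generated transports.
            self._wrapped_methods = {"get_widget": lambda request: {"name": "widget"}}

    class _Client:
        def __init__(self):
            self._transport = _Transport()

        def get_widget(self, request):
            # The client always dispatches through the cached wrapper.
            return self._transport._wrapped_methods["get_widget"](request)

    client = _Client()
    mock_rpc = mock.Mock(return_value={"name": "mocked"})
    client._transport._wrapped_methods["get_widget"] = mock_rpc

    client.get_widget({})
    client.get_widget({})
    assert mock_rpc.call_count == 2  # both calls reused the same cached wrapper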
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tunnel_dest_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py index 640bf7b1b400..7fd52e678178 100644 --- a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py @@ -1373,22 +1373,23 @@ async def test_list_brands_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_brands - ] = mock_object + ] = mock_rpc request = {} await client.list_brands(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_brands(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1661,22 +1662,23 @@ async def test_create_brand_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_brand - ] = mock_object + ] = mock_rpc request = {} await client.create_brand(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_brand(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1954,22 +1956,23 @@ async def test_get_brand_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_brand - ] = mock_object + ] = mock_rpc request = {} await client.get_brand(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_brand(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2259,22 +2262,23 @@ async def test_create_identity_aware_proxy_client_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_identity_aware_proxy_client - ] = mock_object + ] = mock_rpc request = {} await client.create_identity_aware_proxy_client(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_identity_aware_proxy_client(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2567,22 +2571,23 @@ async def test_list_identity_aware_proxy_clients_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_identity_aware_proxy_clients - ] = mock_object + ] = mock_rpc request = {} await client.list_identity_aware_proxy_clients(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_identity_aware_proxy_clients(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3079,22 +3084,23 @@ async def test_get_identity_aware_proxy_client_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_identity_aware_proxy_client - ] = mock_object + ] = mock_rpc request = {} await client.get_identity_aware_proxy_client(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_identity_aware_proxy_client(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3393,22 +3399,23 @@ async def test_reset_identity_aware_proxy_client_secret_async_use_cached_wrapped ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_identity_aware_proxy_client_secret - ] = mock_object + ] = mock_rpc request = {} await client.reset_identity_aware_proxy_client_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reset_identity_aware_proxy_client_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3692,22 +3699,23 @@ async def test_delete_identity_aware_proxy_client_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_identity_aware_proxy_client - ] = mock_object + ] = mock_rpc request = {} await client.delete_identity_aware_proxy_client(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_identity_aware_proxy_client(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-ids/google/cloud/ids/gapic_version.py b/packages/google-cloud-ids/google/cloud/ids/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-ids/google/cloud/ids/gapic_version.py +++ b/packages/google-cloud-ids/google/cloud/ids/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py +++ b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/async_client.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/async_client.py index cbc7be64875d..ba4390b906f4 100644 --- a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/async_client.py +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -182,9 +181,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IDSClient).get_transport_class, type(IDSClient) - ) + get_transport_class = IDSClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py index 854672446f38..fbb80b1ef322 100644 --- a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py @@ -658,7 +658,7 @@ def __init__( ) transport_init: Union[Type[IDSTransport], Callable[..., IDSTransport]] = ( - type(self).get_transport_class(transport) + IDSClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IDSTransport], transport) ) diff --git a/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json b/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json index 48b4c4e6ccb5..49e7eb64c25f 100644 --- a/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json +++ b/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-ids", - "version": "1.7.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py index b3d97be58824..c9c83bc1e7f9 100644 --- a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py +++ b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py @@ -1205,22 +1205,23 @@ async def test_list_endpoints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_endpoints - ] = mock_object + ] = mock_rpc request = {} await client.list_endpoints(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_endpoints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1786,22 +1787,23 @@ async def test_get_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.get_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2108,8 +2110,9 @@ def test_create_endpoint_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
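The gapic_version.py and snippet_metadata_*.json hunks in this diff (for google-cloud-iap, google-cloud-ids, and the kms packages below) reset the version strings to the generator's placeholders, 0.0.0 and 0.1.0. The trailing # {x-release-please-version} marker is what the repository's release tooling keys on to write the real version back at release time, so the placeholder is not what ships. A hypothetical illustration of marker-driven substitution, not the actual tooling::

    import re

    def stamp_version(line: str, version: str) -> str:
        # Only rewrite lines carrying the release marker.
        if "{x-release-please-version}" not in line:
            return line
        return re.sub(r'"\d+\.\d+\.\d+"', f'"{version}"', line)

    line = '__version__ = "0.0.0"  # {x-release-please-version}'
    print(stamp_version(line, "1.7.6"))  # __version__ = "1.7.6"  # {x-release-please-version}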
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_endpoint(request) @@ -2163,26 +2166,28 @@ async def test_create_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.create_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2494,8 +2499,9 @@ def test_delete_endpoint_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_endpoint(request) @@ -2549,26 +2555,28 @@ async def test_delete_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.delete_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py index c1557c8c086b..558c8aab67c5 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.2.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py index c1557c8c086b..558c8aab67c5 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/async_client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/async_client.py index c3ae273ae269..13cf7a337974 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/async_client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,10 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(KeyDashboardServiceClient).get_transport_class, - type(KeyDashboardServiceClient), - ) + get_transport_class = KeyDashboardServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py index 603ff83aa0a9..fc00de358d93 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py @@ -697,7 +697,7 @@ def __init__( Type[KeyDashboardServiceTransport], Callable[..., KeyDashboardServiceTransport], ] = ( - type(self).get_transport_class(transport) + KeyDashboardServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., KeyDashboardServiceTransport], transport) ) diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py index c344a9b209bd..e743db681531 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(KeyTrackingServiceClient).get_transport_class, - type(KeyTrackingServiceClient), - ) + get_transport_class = KeyTrackingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py index 462fb6277fbe..0db475b6fbae 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py @@ -706,7 +706,7 @@ def __init__( Type[KeyTrackingServiceTransport], Callable[..., KeyTrackingServiceTransport], ] = ( - type(self).get_transport_class(transport) + KeyTrackingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., KeyTrackingServiceTransport], transport) ) diff --git a/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json b/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json index 5e8e21aed1ec..b884eb2a257b 100644 --- a/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json +++ b/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-kms-inventory", - "version": "0.2.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py index e93ae8e959f9..46a87e6bd539 100644 --- a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py +++ b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py @@ -1354,22 +1354,23 @@ async def test_list_crypto_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_crypto_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_crypto_keys(request) # Establish that the underlying gRPC stub method was called. 
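The companion client.py hunks (here for KeyTrackingServiceClient, and for the other services above) make the matching change on the synchronous side: transport resolution calls FooClient.get_transport_class(transport) by name instead of going through type(self). Because the method lives on the shared metaclass, the resolved transport should be the same either way, including for subclasses; a small sketch under that assumption::

    class _Meta(type):
        # Hypothetical metaclass standing in for the generated ClientMeta.
        def get_transport_class(cls, label=None):
            return f"{label or 'grpc'} transport"

    class WidgetServiceClient(metaclass=_Meta):
        def __init__(self, transport=None):
            # New generated form names the class directly; type(self) would resolve
            # to the same metaclass method for this class and its subclasses.
            self._transport = WidgetServiceClient.get_transport_class(transport)

    class CustomWidgetClient(WidgetServiceClient):
        pass

    assert WidgetServiceClient()._transport == "grpc transport"
    assert CustomWidgetClient(transport="rest")._transport == "rest transport"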
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_crypto_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py index d8e8b864e2bf..8fed964eba44 100644 --- a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py +++ b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py @@ -1342,22 +1342,23 @@ async def test_get_protected_resources_summary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_protected_resources_summary - ] = mock_object + ] = mock_rpc request = {} await client.get_protected_resources_summary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_protected_resources_summary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1738,22 +1739,23 @@ async def test_search_protected_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_protected_resources - ] = mock_object + ] = mock_rpc request = {} await client.search_protected_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_protected_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py index 92e7066273fd..558c8aab67c5 100644 --- a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.24.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py index 92e7066273fd..558c8aab67c5 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.24.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py index 371a56b83416..6f5a61fc292d 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutokeyClient).get_transport_class, type(AutokeyClient) - ) + get_transport_class = AutokeyClient.get_transport_class def __init__( self, @@ -858,7 +855,6 @@ async def set_iam_policy( **JSON Example** :: - { "bindings": [ { diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py index a242e51cf7c5..e17e6a6fb76e 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py @@ -703,7 +703,7 @@ def __init__( transport_init: Union[ Type[AutokeyTransport], Callable[..., AutokeyTransport] ] = ( - type(self).get_transport_class(transport) + AutokeyClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutokeyTransport], transport) ) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py index 3b44b0dde122..6c285d379502 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutokeyAdminClient).get_transport_class, type(AutokeyAdminClient) - ) + get_transport_class = AutokeyAdminClient.get_transport_class def __init__( self, @@ -828,7 +825,6 @@ async def set_iam_policy( **JSON Example** :: - { "bindings": [ { diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py index 5969b81a0ad6..ea8aa35437b3 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py @@ -661,7 +661,7 @@ def __init__( transport_init: Union[ Type[AutokeyAdminTransport], Callable[..., AutokeyAdminTransport] ] = ( - type(self).get_transport_class(transport) + AutokeyAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutokeyAdminTransport], transport) ) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py index 0c7b641fbf24..c103cc70b30b 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EkmServiceClient).get_transport_class, type(EkmServiceClient) - ) + get_transport_class = EkmServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py index 71ee39954631..923a6f043e2b 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py @@ -711,7 +711,7 @@ def __init__( transport_init: Union[ Type[EkmServiceTransport], Callable[..., EkmServiceTransport] ] = ( - type(self).get_transport_class(transport) + EkmServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EkmServiceTransport], transport) ) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py index 716c68a8b20d..fd207c2cdb13 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -226,10 +225,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(KeyManagementServiceClient).get_transport_class, - type(KeyManagementServiceClient), - ) + get_transport_class = KeyManagementServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py index de0b731f37e0..132311aedc2c 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py @@ -786,7 +786,7 @@ def __init__( Type[KeyManagementServiceTransport], Callable[..., KeyManagementServiceTransport], ] = ( - type(self).get_transport_class(transport) + KeyManagementServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., KeyManagementServiceTransport], transport) ) diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index ca07d69ee987..d50814dbd477 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-kms", - "version": "2.24.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py index c7098cc65347..3e124c98ef19 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py @@ -1184,8 +1184,9 @@ def test_create_key_handle_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_key_handle(request) @@ -1241,26 +1242,28 @@ async def test_create_key_handle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key_handle - ] = mock_object + ] = mock_rpc request = {} await client.create_key_handle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_key_handle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1642,22 +1645,23 @@ async def test_get_key_handle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key_handle - ] = mock_object + ] = mock_rpc request = {} await client.get_key_handle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key_handle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2005,22 +2009,23 @@ async def test_list_key_handles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_key_handles - ] = mock_object + ] = mock_rpc request = {} await client.list_key_handles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_key_handles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py index d9da410610d7..6155ff4520d9 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py @@ -1278,22 +1278,23 @@ async def test_update_autokey_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_autokey_config - ] = mock_object + ] = mock_rpc request = {} await client.update_autokey_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_autokey_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1680,22 +1681,23 @@ async def test_get_autokey_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_autokey_config - ] = mock_object + ] = mock_rpc request = {} await client.get_autokey_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_autokey_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2069,22 +2071,23 @@ async def test_show_effective_autokey_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.show_effective_autokey_config - ] = mock_object + ] = mock_rpc request = {} await client.show_effective_autokey_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.show_effective_autokey_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py index e4bdd5a3b25c..ccc65aa6fb01 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py @@ -1271,22 +1271,23 @@ async def test_list_ekm_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_ekm_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_ekm_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_ekm_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1873,22 +1874,23 @@ async def test_get_ekm_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ekm_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_ekm_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ekm_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2283,22 +2285,23 @@ async def test_create_ekm_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ekm_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_ekm_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_ekm_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2707,22 +2710,23 @@ async def test_update_ekm_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_ekm_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_ekm_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_ekm_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3103,22 +3107,23 @@ async def test_get_ekm_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ekm_config - ] = mock_object + ] = mock_rpc request = {} await client.get_ekm_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ekm_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3478,22 +3483,23 @@ async def test_update_ekm_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_ekm_config - ] = mock_object + ] = mock_rpc request = {} await client.update_ekm_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_ekm_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3871,22 +3877,23 @@ async def test_verify_connectivity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.verify_connectivity - ] = mock_object + ] = mock_rpc request = {} await client.verify_connectivity(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.verify_connectivity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py index ee2c45c459b0..f362a35fe67d 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py @@ -1370,22 +1370,23 @@ async def test_list_key_rings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_key_rings - ] = mock_object + ] = mock_rpc request = {} await client.list_key_rings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_key_rings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1941,22 +1942,23 @@ async def test_list_crypto_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_crypto_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_crypto_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_crypto_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2523,22 +2525,23 @@ async def test_list_crypto_key_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_crypto_key_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_crypto_key_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_crypto_key_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3114,22 +3117,23 @@ async def test_list_import_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_import_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_import_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_import_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3674,22 +3678,23 @@ async def test_get_key_ring_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key_ring - ] = mock_object + ] = mock_rpc request = {} await client.get_key_ring(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4043,22 +4048,23 @@ async def test_get_crypto_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_crypto_key - ] = mock_object + ] = mock_rpc request = {} await client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4455,22 +4461,23 @@ async def test_get_crypto_key_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_crypto_key_version - ] = mock_object + ] = mock_rpc request = {} await client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4866,22 +4873,23 @@ async def test_get_public_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_public_key - ] = mock_object + ] = mock_rpc request = {} await client.get_public_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5247,22 +5255,23 @@ async def test_get_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_import_job - ] = mock_object + ] = mock_rpc request = {} await client.get_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5618,22 +5627,23 @@ async def test_create_key_ring_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key_ring - ] = mock_object + ] = mock_rpc request = {} await client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6019,22 +6029,23 @@ async def test_create_crypto_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_crypto_key - ] = mock_object + ] = mock_rpc request = {} await client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6461,22 +6472,23 @@ async def test_create_crypto_key_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_crypto_key_version - ] = mock_object + ] = mock_rpc request = {} await client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6920,22 +6932,23 @@ async def test_import_crypto_key_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_crypto_key_version - ] = mock_object + ] = mock_rpc request = {} await client.import_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7257,22 +7270,23 @@ async def test_create_import_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_import_job - ] = mock_object + ] = mock_rpc request = {} await client.create_import_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7671,22 +7685,23 @@ async def test_update_crypto_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_crypto_key - ] = mock_object + ] = mock_rpc request = {} await client.update_crypto_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8099,22 +8114,23 @@ async def test_update_crypto_key_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_crypto_key_version - ] = mock_object + ] = mock_rpc request = {} await client.update_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8532,22 +8548,23 @@ async def test_update_crypto_key_primary_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_crypto_key_primary_version - ] = mock_object + ] = mock_rpc request = {} await client.update_crypto_key_primary_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_crypto_key_primary_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8965,22 +8982,23 @@ async def test_destroy_crypto_key_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.destroy_crypto_key_version - ] = mock_object + ] = mock_rpc request = {} await client.destroy_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.destroy_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9410,22 +9428,23 @@ async def test_restore_crypto_key_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_crypto_key_version - ] = mock_object + ] = mock_rpc request = {} await client.restore_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.restore_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9819,22 +9838,23 @@ async def test_encrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.encrypt - ] = mock_object + ] = mock_rpc request = {} await client.encrypt(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.encrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10205,22 +10225,23 @@ async def test_decrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.decrypt - ] = mock_object + ] = mock_rpc request = {} await client.decrypt(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10604,22 +10625,23 @@ async def test_raw_encrypt_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.raw_encrypt - ] = mock_object + ] = mock_rpc request = {} await client.raw_encrypt(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.raw_encrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10912,22 +10934,23 @@ async def test_raw_decrypt_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.raw_decrypt - ] = mock_object + ] = mock_rpc request = {} await client.raw_decrypt(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.raw_decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11214,22 +11237,23 @@ async def test_asymmetric_sign_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.asymmetric_sign - ] = mock_object + ] = mock_rpc request = {} await client.asymmetric_sign(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.asymmetric_sign(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11614,22 +11638,23 @@ async def test_asymmetric_decrypt_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.asymmetric_decrypt - ] = mock_object + ] = mock_rpc request = {} await client.asymmetric_decrypt(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.asymmetric_decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12009,22 +12034,23 @@ async def test_mac_sign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mac_sign - ] = mock_object + ] = mock_rpc request = {} await client.mac_sign(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mac_sign(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12402,22 +12428,23 @@ async def test_mac_verify_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.mac_verify - ] = mock_object + ] = mock_rpc request = {} await client.mac_verify(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.mac_verify(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12809,22 +12836,23 @@ async def test_generate_random_bytes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_random_bytes - ] = mock_object + ] = mock_rpc request = {} await client.generate_random_bytes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_random_bytes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-language/google/cloud/language/gapic_version.py b/packages/google-cloud-language/google/cloud/language/gapic_version.py index 773005b00ca9..558c8aab67c5 100644 --- a/packages/google-cloud-language/google/cloud/language/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py b/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py index 773005b00ca9..558c8aab67c5 100644 --- a/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/async_client.py b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/async_client.py index 1a97fdcd1a67..4ec9299a6e7d 100644 --- a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/async_client.py +++ b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient) - ) + get_transport_class = LanguageServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py index 0723561bbce0..50c93270a8c4 100644 --- a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py +++ b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[LanguageServiceTransport], Callable[..., LanguageServiceTransport] ] = ( - type(self).get_transport_class(transport) + LanguageServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LanguageServiceTransport], transport) ) diff --git a/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py b/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py index 773005b00ca9..558c8aab67c5 100644 --- a/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/async_client.py b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/async_client.py index 29304fb779b3..e571f44fb125 100644 --- a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/async_client.py +++ b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient) - ) + get_transport_class = LanguageServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py index e58bd04d0adc..a14dd15603d2 100644 --- a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py +++ b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[LanguageServiceTransport], Callable[..., LanguageServiceTransport] ] = ( - type(self).get_transport_class(transport) + LanguageServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LanguageServiceTransport], transport) ) diff --git a/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py b/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py index 773005b00ca9..558c8aab67c5 100644 --- a/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/async_client.py b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/async_client.py index 9b8367bd227e..23f9a6fffdaa 100644 --- a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/async_client.py +++ b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient) - ) + get_transport_class = LanguageServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py index 12bf1e846c28..049cd70aee2b 100644 --- a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py +++ b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[LanguageServiceTransport], Callable[..., LanguageServiceTransport] ] = ( - type(self).get_transport_class(transport) + LanguageServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LanguageServiceTransport], transport) ) diff --git a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json index 9fe85dbfc174..4e481f59df7e 100644 --- a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json +++ b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.14.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json index 71ab9b64d3da..fb6633f26470 100644 --- a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json +++ b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.14.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json index 1869a439faf3..e25172b52357 100644 --- a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json +++ b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.14.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py b/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py index 7612f714e19d..51cf20e38e54 100644 --- a/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py +++ b/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py @@ -1313,22 +1313,23 @@ async def 
test_analyze_sentiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_sentiment - ] = mock_object + ] = mock_rpc request = {} await client.analyze_sentiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_sentiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_analyze_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_entities - ] = mock_object + ] = mock_rpc request = {} await client.analyze_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1978,22 +1980,23 @@ async def test_analyze_entity_sentiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_entity_sentiment - ] = mock_object + ] = mock_rpc request = {} await client.analyze_entity_sentiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_entity_sentiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2306,22 +2309,23 @@ async def test_analyze_syntax_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_syntax - ] = mock_object + ] = mock_rpc request = {} await client.analyze_syntax(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_syntax(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2622,22 +2626,23 @@ async def test_classify_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.classify_text - ] = mock_object + ] = mock_rpc request = {} await client.classify_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.classify_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2925,22 +2930,23 @@ async def test_moderate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.moderate_text - ] = mock_object + ] = mock_rpc request = {} await client.moderate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.moderate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3233,22 +3239,23 @@ async def test_annotate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_text - ] = mock_object + ] = mock_rpc request = {} await client.annotate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.annotate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py b/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py index 4400a27f18d9..ba3205a88651 100644 --- a/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py +++ b/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -1313,22 +1313,23 @@ async def test_analyze_sentiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_sentiment - ] = mock_object + ] = mock_rpc request = {} await client.analyze_sentiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_sentiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_analyze_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_entities - ] = mock_object + ] = mock_rpc request = {} await client.analyze_entities(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1978,22 +1980,23 @@ async def test_analyze_entity_sentiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_entity_sentiment - ] = mock_object + ] = mock_rpc request = {} await client.analyze_entity_sentiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_entity_sentiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2306,22 +2309,23 @@ async def test_analyze_syntax_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_syntax - ] = mock_object + ] = mock_rpc request = {} await client.analyze_syntax(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_syntax(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2622,22 +2626,23 @@ async def test_classify_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.classify_text - ] = mock_object + ] = mock_rpc request = {} await client.classify_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.classify_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2925,22 +2930,23 @@ async def test_moderate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.moderate_text - ] = mock_object + ] = mock_rpc request = {} await client.moderate_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.moderate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3233,22 +3239,23 @@ async def test_annotate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_text - ] = mock_object + ] = mock_rpc request = {} await client.annotate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.annotate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py b/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py index c64637779d59..3971108bc1d8 100644 --- a/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py +++ b/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py @@ -1316,22 +1316,23 @@ async def test_analyze_sentiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_sentiment - ] = mock_object + ] = mock_rpc request = {} await client.analyze_sentiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_sentiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1651,22 +1652,23 @@ async def test_analyze_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_entities - ] = mock_object + ] = mock_rpc request = {} await client.analyze_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1978,22 +1980,23 @@ async def test_classify_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.classify_text - ] = mock_object + ] = mock_rpc request = {} await client.classify_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.classify_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2294,22 +2297,23 @@ async def test_moderate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.moderate_text - ] = mock_object + ] = mock_rpc request = {} await client.moderate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.moderate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2610,22 +2614,23 @@ async def test_annotate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_text - ] = mock_object + ] = mock_rpc request = {} await client.annotate_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.annotate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 38b9decd5d2bb577fdcacac026fe1a806c07ee9b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 11:00:21 -0400 Subject: [PATCH 018/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#12998) - [x] Regenerate this pull request now. 
PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGEtZnVzaW9uLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGEtcW5hLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFjYXRhbG9nLWxpbmVhZ2UvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFjYXRhbG9nLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFmbG93LWNsaWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFmb3JtLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFsYWJlbGluZy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwbGV4Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLW1ldGFzdG9yZS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFzdHJlYW0vLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRlcGxveS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRldmVsb3BlcmNvbm5lY3QvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpYWxvZ2Zsb3cvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpc2NvdmVyeWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRscC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRtcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRvY3VtZW50YWkvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRvbWFpbnMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWVkZ2Vjb250YWluZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky Co-authored-by: Anthonios Partheniou --- .../google/cloud/data_fusion/gapic_version.py | 2 +- .../cloud/data_fusion_v1/gapic_version.py | 2 +- .../services/data_fusion/async_client.py | 5 +- .../services/data_fusion/client.py | 2 +- ...t_metadata_google.cloud.datafusion.v1.json | 2 +- 
.../gapic/data_fusion_v1/test_data_fusion.py | 103 +-- .../google/cloud/dataqna/gapic_version.py | 2 +- .../cloud/dataqna_v1alpha/gapic_version.py | 2 +- .../auto_suggestion_service/async_client.py | 6 +- .../auto_suggestion_service/client.py | 2 +- .../services/question_service/async_client.py | 5 +- .../services/question_service/client.py | 2 +- ...metadata_google.cloud.dataqna.v1alpha.json | 2 +- .../test_auto_suggestion_service.py | 9 +- .../dataqna_v1alpha/test_question_service.py | 45 +- .../datacatalog_lineage/gapic_version.py | 2 +- .../datacatalog_lineage_v1/gapic_version.py | 2 +- .../services/lineage/async_client.py | 5 +- .../services/lineage/client.py | 2 +- ...a_google.cloud.datacatalog.lineage.v1.json | 2 +- .../datacatalog_lineage_v1/test_lineage.py | 173 ++--- .../google/cloud/datacatalog/gapic_version.py | 2 +- .../cloud/datacatalog_v1/gapic_version.py | 2 +- .../services/data_catalog/async_client.py | 5 +- .../services/data_catalog/client.py | 2 +- .../policy_tag_manager/async_client.py | 5 +- .../services/policy_tag_manager/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../datacatalog_v1beta1/gapic_version.py | 2 +- .../services/data_catalog/async_client.py | 5 +- .../services/data_catalog/client.py | 2 +- .../policy_tag_manager/async_client.py | 5 +- .../services/policy_tag_manager/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- ..._metadata_google.cloud.datacatalog.v1.json | 2 +- ...data_google.cloud.datacatalog.v1beta1.json | 2 +- .../gapic/datacatalog_v1/test_data_catalog.py | 326 +++++----- .../datacatalog_v1/test_policy_tag_manager.py | 117 ++-- .../test_policy_tag_manager_serialization.py | 27 +- .../datacatalog_v1beta1/test_data_catalog.py | 252 ++++---- .../test_policy_tag_manager.py | 117 ++-- .../test_policy_tag_manager_serialization.py | 18 +- .../google/cloud/dataflow/gapic_version.py | 2 +- .../cloud/dataflow_v1beta3/gapic_version.py | 2 +- .../flex_templates_service/async_client.py | 6 +- .../services/flex_templates_service/client.py | 2 +- .../services/jobs_v1_beta3/async_client.py | 5 +- .../services/jobs_v1_beta3/client.py | 2 +- .../messages_v1_beta3/async_client.py | 5 +- .../services/messages_v1_beta3/client.py | 2 +- .../services/metrics_v1_beta3/async_client.py | 5 +- .../services/metrics_v1_beta3/client.py | 2 +- .../snapshots_v1_beta3/async_client.py | 5 +- .../services/snapshots_v1_beta3/client.py | 2 +- .../templates_service/async_client.py | 5 +- .../services/templates_service/client.py | 2 +- ...ppet_metadata_google.dataflow.v1beta3.json | 2 +- .../test_flex_templates_service.py | 9 +- .../dataflow_v1beta3/test_jobs_v1_beta3.py | 63 +- .../test_messages_v1_beta3.py | 9 +- .../dataflow_v1beta3/test_metrics_v1_beta3.py | 27 +- .../test_snapshots_v1_beta3.py | 27 +- .../test_templates_service.py | 27 +- .../google/cloud/dataform/gapic_version.py | 2 +- .../cloud/dataform_v1beta1/gapic_version.py | 2 +- .../services/dataform/async_client.py | 5 +- .../services/dataform/client.py | 2 +- ...etadata_google.cloud.dataform.v1beta1.json | 2 +- .../gapic/dataform_v1beta1/test_dataform.py | 459 ++++++++------ .../cloud/datalabeling/gapic_version.py | 2 +- .../datalabeling_v1beta1/gapic_version.py | 2 +- .../data_labeling_service/async_client.py | 6 +- .../services/data_labeling_service/client.py | 2 +- ...ata_google.cloud.datalabeling.v1beta1.json | 2 +- .../test_data_labeling_service.py | 366 ++++++----- .../google/cloud/dataplex/gapic_version.py | 2 +- .../google/cloud/dataplex_v1/gapic_version.py | 2 +- 
.../services/catalog_service/async_client.py | 5 +- .../services/catalog_service/client.py | 2 +- .../services/content_service/async_client.py | 5 +- .../services/content_service/client.py | 2 +- .../data_scan_service/async_client.py | 5 +- .../services/data_scan_service/client.py | 2 +- .../data_taxonomy_service/async_client.py | 6 +- .../services/data_taxonomy_service/client.py | 2 +- .../services/dataplex_service/async_client.py | 5 +- .../services/dataplex_service/client.py | 2 +- .../services/metadata_service/async_client.py | 5 +- .../services/metadata_service/client.py | 2 +- .../google/cloud/dataplex_v1/types/catalog.py | 4 +- ...pet_metadata_google.cloud.dataplex.v1.json | 2 +- .../gapic/dataplex_v1/test_catalog_service.py | 288 +++++---- .../gapic/dataplex_v1/test_content_service.py | 72 ++- .../dataplex_v1/test_data_scan_service.py | 111 ++-- .../dataplex_v1/test_data_taxonomy_service.py | 225 ++++--- .../dataplex_v1/test_dataplex_service.py | 447 +++++++------ .../dataplex_v1/test_metadata_service.py | 81 +-- .../google/cloud/metastore/gapic_version.py | 2 +- .../cloud/metastore_v1/gapic_version.py | 2 +- .../dataproc_metastore/async_client.py | 5 +- .../services/dataproc_metastore/client.py | 2 +- .../async_client.py | 6 +- .../dataproc_metastore_federation/client.py | 2 +- .../cloud/metastore_v1alpha/gapic_version.py | 2 +- .../dataproc_metastore/async_client.py | 5 +- .../services/dataproc_metastore/client.py | 2 +- .../async_client.py | 6 +- .../dataproc_metastore_federation/client.py | 2 +- .../cloud/metastore_v1beta/gapic_version.py | 2 +- .../dataproc_metastore/async_client.py | 5 +- .../services/dataproc_metastore/client.py | 2 +- .../async_client.py | 6 +- .../dataproc_metastore_federation/client.py | 2 +- ...et_metadata_google.cloud.metastore.v1.json | 2 +- ...tadata_google.cloud.metastore.v1alpha.json | 2 +- ...etadata_google.cloud.metastore.v1beta.json | 2 +- .../metastore_v1/test_dataproc_metastore.py | 282 +++++---- .../test_dataproc_metastore_federation.py | 75 ++- .../test_dataproc_metastore.py | 291 +++++---- .../test_dataproc_metastore_federation.py | 75 ++- .../test_dataproc_metastore.py | 291 +++++---- .../test_dataproc_metastore_federation.py | 75 ++- .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../autoscaling_policy_service/client.py | 2 +- .../services/batch_controller/async_client.py | 5 +- .../services/batch_controller/client.py | 2 +- .../cluster_controller/async_client.py | 5 +- .../services/cluster_controller/client.py | 2 +- .../services/job_controller/async_client.py | 5 +- .../services/job_controller/client.py | 2 +- .../node_group_controller/async_client.py | 6 +- .../services/node_group_controller/client.py | 2 +- .../session_controller/async_client.py | 5 +- .../services/session_controller/client.py | 2 +- .../async_client.py | 6 +- .../session_template_controller/client.py | 2 +- .../workflow_template_service/async_client.py | 6 +- .../workflow_template_service/client.py | 2 +- ...pet_metadata_google.cloud.dataproc.v1.json | 2 +- .../test_autoscaling_policy_service.py | 45 +- .../dataproc_v1/test_batch_controller.py | 46 +- .../dataproc_v1/test_cluster_controller.py | 132 ++-- .../gapic/dataproc_v1/test_job_controller.py | 73 ++- .../dataproc_v1/test_node_group_controller.py | 47 +- .../dataproc_v1/test_session_controller.py | 75 ++- .../test_session_template_controller.py | 45 +- .../test_workflow_template_service.py | 83 +-- 
.../google/cloud/datastream/gapic_version.py | 2 +- .../cloud/datastream_v1/gapic_version.py | 2 +- .../services/datastream/async_client.py | 5 +- .../services/datastream/client.py | 2 +- .../datastream_v1alpha1/gapic_version.py | 2 +- .../services/datastream/async_client.py | 5 +- .../services/datastream/client.py | 2 +- ...t_metadata_google.cloud.datastream.v1.json | 2 +- ...data_google.cloud.datastream.v1alpha1.json | 2 +- .../gapic/datastream_v1/test_datastream.py | 325 ++++++---- .../datastream_v1alpha1/test_datastream.py | 299 +++++---- .../google/cloud/deploy/gapic_version.py | 2 +- .../google/cloud/deploy_v1/gapic_version.py | 2 +- .../services/cloud_deploy/async_client.py | 5 +- .../deploy_v1/services/cloud_deploy/client.py | 2 +- .../cloud/deploy_v1/types/cloud_deploy.py | 2 +- ...ippet_metadata_google.cloud.deploy.v1.json | 2 +- .../unit/gapic/deploy_v1/test_cloud_deploy.py | 500 ++++++++------- .../cloud/developerconnect/gapic_version.py | 2 +- .../developerconnect_v1/gapic_version.py | 2 +- .../developer_connect/async_client.py | 5 +- .../services/developer_connect/client.py | 2 +- ...data_google.cloud.developerconnect.v1.json | 2 +- .../test_developer_connect.py | 176 ++--- .../google/cloud/dialogflow/gapic_version.py | 2 +- .../cloud/dialogflow_v2/gapic_version.py | 2 +- .../services/agents/async_client.py | 5 +- .../dialogflow_v2/services/agents/client.py | 2 +- .../services/answer_records/async_client.py | 5 +- .../services/answer_records/client.py | 2 +- .../services/contexts/async_client.py | 5 +- .../dialogflow_v2/services/contexts/client.py | 2 +- .../conversation_datasets/async_client.py | 6 +- .../services/conversation_datasets/client.py | 2 +- .../conversation_models/async_client.py | 6 +- .../services/conversation_models/client.py | 2 +- .../conversation_profiles/async_client.py | 6 +- .../services/conversation_profiles/client.py | 2 +- .../services/conversations/async_client.py | 5 +- .../services/conversations/client.py | 2 +- .../services/documents/async_client.py | 5 +- .../services/documents/client.py | 2 +- .../encryption_spec_service/async_client.py | 6 +- .../encryption_spec_service/client.py | 2 +- .../services/entity_types/async_client.py | 5 +- .../services/entity_types/client.py | 2 +- .../services/environments/async_client.py | 5 +- .../services/environments/client.py | 2 +- .../services/fulfillments/async_client.py | 5 +- .../services/fulfillments/client.py | 2 +- .../services/generators/async_client.py | 5 +- .../services/generators/client.py | 2 +- .../services/intents/async_client.py | 5 +- .../dialogflow_v2/services/intents/client.py | 2 +- .../services/knowledge_bases/async_client.py | 5 +- .../services/knowledge_bases/client.py | 2 +- .../services/participants/async_client.py | 5 +- .../services/participants/client.py | 2 +- .../session_entity_types/async_client.py | 6 +- .../services/session_entity_types/client.py | 2 +- .../services/sessions/async_client.py | 5 +- .../dialogflow_v2/services/sessions/client.py | 2 +- .../services/versions/async_client.py | 5 +- .../dialogflow_v2/services/versions/client.py | 2 +- .../cloud/dialogflow_v2beta1/gapic_version.py | 2 +- .../services/agents/async_client.py | 5 +- .../services/agents/client.py | 2 +- .../services/answer_records/async_client.py | 5 +- .../services/answer_records/client.py | 2 +- .../services/contexts/async_client.py | 5 +- .../services/contexts/client.py | 2 +- .../conversation_profiles/async_client.py | 6 +- .../services/conversation_profiles/client.py | 2 +- 
.../services/conversations/async_client.py | 5 +- .../services/conversations/client.py | 2 +- .../services/documents/async_client.py | 5 +- .../services/documents/client.py | 2 +- .../encryption_spec_service/async_client.py | 6 +- .../encryption_spec_service/client.py | 2 +- .../services/entity_types/async_client.py | 5 +- .../services/entity_types/client.py | 2 +- .../services/environments/async_client.py | 5 +- .../services/environments/client.py | 2 +- .../services/fulfillments/async_client.py | 5 +- .../services/fulfillments/client.py | 2 +- .../services/generators/async_client.py | 5 +- .../services/generators/client.py | 2 +- .../services/intents/async_client.py | 5 +- .../services/intents/client.py | 2 +- .../services/knowledge_bases/async_client.py | 5 +- .../services/knowledge_bases/client.py | 2 +- .../services/participants/async_client.py | 5 +- .../services/participants/client.py | 2 +- .../session_entity_types/async_client.py | 6 +- .../services/session_entity_types/client.py | 2 +- .../services/sessions/async_client.py | 5 +- .../services/sessions/client.py | 2 +- .../services/versions/async_client.py | 5 +- .../services/versions/client.py | 2 +- ...t_metadata_google.cloud.dialogflow.v2.json | 2 +- ...adata_google.cloud.dialogflow.v2beta1.json | 2 +- .../unit/gapic/dialogflow_v2/test_agents.py | 121 ++-- .../dialogflow_v2/test_answer_records.py | 18 +- .../unit/gapic/dialogflow_v2/test_contexts.py | 54 +- .../test_conversation_datasets.py | 75 ++- .../dialogflow_v2/test_conversation_models.py | 131 ++-- .../test_conversation_profiles.py | 83 +-- .../gapic/dialogflow_v2/test_conversations.py | 81 +-- .../gapic/dialogflow_v2/test_documents.py | 132 ++-- .../test_encryption_spec_service.py | 28 +- .../gapic/dialogflow_v2/test_entity_types.py | 140 ++-- .../gapic/dialogflow_v2/test_environments.py | 54 +- .../gapic/dialogflow_v2/test_fulfillments.py | 18 +- .../gapic/dialogflow_v2/test_generators.py | 45 +- .../unit/gapic/dialogflow_v2/test_intents.py | 83 +-- .../dialogflow_v2/test_knowledge_bases.py | 45 +- .../gapic/dialogflow_v2/test_participants.py | 90 +-- .../test_session_entity_types.py | 45 +- .../unit/gapic/dialogflow_v2/test_sessions.py | 18 +- .../unit/gapic/dialogflow_v2/test_versions.py | 45 +- .../gapic/dialogflow_v2beta1/test_agents.py | 121 ++-- .../dialogflow_v2beta1/test_answer_records.py | 27 +- .../gapic/dialogflow_v2beta1/test_contexts.py | 54 +- .../test_conversation_profiles.py | 83 +-- .../dialogflow_v2beta1/test_conversations.py | 90 +-- .../dialogflow_v2beta1/test_documents.py | 113 ++-- .../test_encryption_spec_service.py | 28 +- .../dialogflow_v2beta1/test_entity_types.py | 140 ++-- .../dialogflow_v2beta1/test_environments.py | 54 +- .../dialogflow_v2beta1/test_fulfillments.py | 18 +- .../dialogflow_v2beta1/test_generators.py | 45 +- .../gapic/dialogflow_v2beta1/test_intents.py | 83 +-- .../test_knowledge_bases.py | 45 +- .../dialogflow_v2beta1/test_participants.py | 108 ++-- .../test_session_entity_types.py | 45 +- .../gapic/dialogflow_v2beta1/test_sessions.py | 18 +- .../gapic/dialogflow_v2beta1/test_versions.py | 45 +- .../cloud/discoveryengine/gapic_version.py | 2 +- .../cloud/discoveryengine_v1/gapic_version.py | 2 +- .../completion_service/async_client.py | 5 +- .../services/completion_service/client.py | 2 +- .../services/control_service/async_client.py | 5 +- .../services/control_service/client.py | 2 +- .../async_client.py | 6 +- .../conversational_search_service/client.py | 2 +- .../data_store_service/async_client.py | 5 +- 
.../services/data_store_service/client.py | 2 +- .../services/document_service/async_client.py | 5 +- .../services/document_service/client.py | 2 +- .../services/engine_service/async_client.py | 5 +- .../services/engine_service/client.py | 2 +- .../async_client.py | 6 +- .../grounded_generation_service/client.py | 2 +- .../services/project_service/async_client.py | 5 +- .../services/project_service/client.py | 2 +- .../services/rank_service/async_client.py | 5 +- .../services/rank_service/client.py | 2 +- .../recommendation_service/async_client.py | 6 +- .../services/recommendation_service/client.py | 2 +- .../services/schema_service/async_client.py | 5 +- .../services/schema_service/client.py | 2 +- .../services/search_service/async_client.py | 5 +- .../services/search_service/client.py | 2 +- .../async_client.py | 6 +- .../site_search_engine_service/client.py | 2 +- .../user_event_service/async_client.py | 5 +- .../services/user_event_service/client.py | 2 +- .../discoveryengine_v1alpha/gapic_version.py | 2 +- .../acl_config_service/async_client.py | 5 +- .../services/acl_config_service/client.py | 2 +- .../services/chunk_service/async_client.py | 5 +- .../services/chunk_service/client.py | 2 +- .../completion_service/async_client.py | 5 +- .../services/completion_service/client.py | 2 +- .../services/control_service/async_client.py | 5 +- .../services/control_service/client.py | 2 +- .../async_client.py | 6 +- .../conversational_search_service/client.py | 2 +- .../data_store_service/async_client.py | 5 +- .../services/data_store_service/client.py | 2 +- .../services/document_service/async_client.py | 5 +- .../services/document_service/client.py | 2 +- .../services/engine_service/async_client.py | 5 +- .../services/engine_service/client.py | 2 +- .../estimate_billing_service/async_client.py | 6 +- .../estimate_billing_service/client.py | 2 +- .../evaluation_service/async_client.py | 5 +- .../services/evaluation_service/client.py | 2 +- .../async_client.py | 6 +- .../grounded_generation_service/client.py | 2 +- .../services/project_service/async_client.py | 5 +- .../services/project_service/client.py | 2 +- .../services/rank_service/async_client.py | 5 +- .../services/rank_service/client.py | 2 +- .../recommendation_service/async_client.py | 6 +- .../services/recommendation_service/client.py | 2 +- .../sample_query_service/async_client.py | 6 +- .../services/sample_query_service/client.py | 2 +- .../sample_query_set_service/async_client.py | 6 +- .../sample_query_set_service/client.py | 2 +- .../services/schema_service/async_client.py | 5 +- .../services/schema_service/client.py | 2 +- .../services/search_service/async_client.py | 5 +- .../services/search_service/client.py | 2 +- .../search_tuning_service/async_client.py | 6 +- .../services/search_tuning_service/client.py | 2 +- .../serving_config_service/async_client.py | 6 +- .../services/serving_config_service/client.py | 2 +- .../async_client.py | 6 +- .../site_search_engine_service/client.py | 2 +- .../user_event_service/async_client.py | 5 +- .../services/user_event_service/client.py | 2 +- .../discoveryengine_v1beta/gapic_version.py | 2 +- .../completion_service/async_client.py | 5 +- .../services/completion_service/client.py | 2 +- .../services/control_service/async_client.py | 5 +- .../services/control_service/client.py | 2 +- .../async_client.py | 6 +- .../conversational_search_service/client.py | 2 +- .../data_store_service/async_client.py | 5 +- .../services/data_store_service/client.py | 2 +- 
.../services/document_service/async_client.py | 5 +- .../services/document_service/client.py | 2 +- .../services/engine_service/async_client.py | 5 +- .../services/engine_service/client.py | 2 +- .../evaluation_service/async_client.py | 5 +- .../services/evaluation_service/client.py | 2 +- .../async_client.py | 6 +- .../grounded_generation_service/client.py | 2 +- .../services/project_service/async_client.py | 5 +- .../services/project_service/client.py | 2 +- .../services/rank_service/async_client.py | 5 +- .../services/rank_service/client.py | 2 +- .../recommendation_service/async_client.py | 6 +- .../services/recommendation_service/client.py | 2 +- .../sample_query_service/async_client.py | 6 +- .../services/sample_query_service/client.py | 2 +- .../sample_query_set_service/async_client.py | 6 +- .../sample_query_set_service/client.py | 2 +- .../services/schema_service/async_client.py | 5 +- .../services/schema_service/client.py | 2 +- .../services/search_service/async_client.py | 5 +- .../services/search_service/client.py | 2 +- .../search_tuning_service/async_client.py | 6 +- .../services/search_tuning_service/client.py | 2 +- .../serving_config_service/async_client.py | 6 +- .../services/serving_config_service/client.py | 2 +- .../async_client.py | 6 +- .../site_search_engine_service/client.py | 2 +- .../user_event_service/async_client.py | 5 +- .../services/user_event_service/client.py | 2 +- ...adata_google.cloud.discoveryengine.v1.json | 2 +- ..._google.cloud.discoveryengine.v1alpha.json | 2 +- ...a_google.cloud.discoveryengine.v1beta.json | 2 +- .../test_completion_service.py | 85 +-- .../test_control_service.py | 45 +- .../test_conversational_search_service.py | 117 ++-- .../test_data_store_service.py | 65 +- .../test_document_service.py | 83 +-- .../discoveryengine_v1/test_engine_service.py | 65 +- .../test_grounded_generation_service.py | 9 +- .../test_project_service.py | 19 +- .../discoveryengine_v1/test_rank_service.py | 9 +- .../test_recommendation_service.py | 9 +- .../discoveryengine_v1/test_schema_service.py | 75 ++- .../discoveryengine_v1/test_search_service.py | 9 +- .../test_site_search_engine_service.py | 188 +++--- .../test_user_event_service.py | 37 +- .../test_acl_config_service.py | 18 +- .../test_chunk_service.py | 18 +- .../test_completion_service.py | 85 +-- .../test_control_service.py | 45 +- .../test_conversational_search_service.py | 117 ++-- .../test_data_store_service.py | 83 +-- .../test_document_service.py | 92 +-- .../test_engine_service.py | 102 +-- .../test_estimate_billing_service.py | 19 +- .../test_evaluation_service.py | 46 +- .../test_grounded_generation_service.py | 9 +- .../test_project_service.py | 37 +- .../test_rank_service.py | 9 +- .../test_recommendation_service.py | 9 +- .../test_sample_query_service.py | 64 +- .../test_sample_query_set_service.py | 45 +- .../test_schema_service.py | 75 ++- .../test_search_service.py | 9 +- .../test_search_tuning_service.py | 28 +- .../test_serving_config_service.py | 27 +- .../test_site_search_engine_service.py | 216 ++++--- .../test_user_event_service.py | 56 +- .../test_completion_service.py | 85 +-- .../test_control_service.py | 45 +- .../test_conversational_search_service.py | 117 ++-- .../test_data_store_service.py | 65 +- .../test_document_service.py | 83 +-- .../test_engine_service.py | 102 +-- .../test_evaluation_service.py | 46 +- .../test_grounded_generation_service.py | 9 +- .../test_project_service.py | 19 +- .../test_rank_service.py | 9 +- .../test_recommendation_service.py | 9 +- 
.../test_sample_query_service.py | 64 +- .../test_sample_query_set_service.py | 45 +- .../test_schema_service.py | 75 ++- .../test_search_service.py | 9 +- .../test_search_tuning_service.py | 28 +- .../test_serving_config_service.py | 27 +- .../test_site_search_engine_service.py | 188 +++--- .../test_user_event_service.py | 56 +- .../google/cloud/clouddms/gapic_version.py | 2 +- .../google/cloud/clouddms_v1/gapic_version.py | 2 +- .../data_migration_service/async_client.py | 6 +- .../services/data_migration_service/client.py | 2 +- .../types/conversionworkspace_resources.py | 7 +- ...pet_metadata_google.cloud.clouddms.v1.json | 2 +- .../test_data_migration_service.py | 599 ++++++++++-------- .../google/cloud/documentai/gapic_version.py | 2 +- .../cloud/documentai_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../document_processor_service/client.py | 2 +- .../cloud/documentai_v1beta2/gapic_version.py | 2 +- .../async_client.py | 6 +- .../document_understanding_service/client.py | 2 +- .../cloud/documentai_v1beta3/gapic_version.py | 2 +- .../async_client.py | 6 +- .../document_processor_service/client.py | 2 +- .../services/document_service/async_client.py | 5 +- .../services/document_service/client.py | 2 +- .../types/document_service.py | 4 +- ...t_metadata_google.cloud.documentai.v1.json | 2 +- ...adata_google.cloud.documentai.v1beta2.json | 2 +- ...adata_google.cloud.documentai.v1beta3.json | 2 +- .../test_document_processor_service.py | 308 +++++---- .../test_document_understanding_service.py | 28 +- .../test_document_processor_service.py | 327 ++++++---- .../test_document_service.py | 93 +-- .../google/cloud/domains/gapic_version.py | 2 +- .../google/cloud/domains_v1/gapic_version.py | 2 +- .../services/domains/async_client.py | 5 +- .../domains_v1/services/domains/client.py | 2 +- .../cloud/domains_v1beta1/gapic_version.py | 2 +- .../services/domains/async_client.py | 5 +- .../services/domains/client.py | 2 +- ...ppet_metadata_google.cloud.domains.v1.json | 2 +- ...metadata_google.cloud.domains.v1beta1.json | 2 +- .../unit/gapic/domains_v1/test_domains.py | 215 ++++--- .../gapic/domains_v1beta1/test_domains.py | 215 ++++--- .../cloud/edgecontainer/gapic_version.py | 2 +- .../cloud/edgecontainer_v1/gapic_version.py | 2 +- .../services/edge_container/async_client.py | 5 +- .../services/edge_container/client.py | 2 +- .../cloud/edgecontainer_v1/types/resources.py | 2 +- ...etadata_google.cloud.edgecontainer.v1.json | 2 +- .../edgecontainer_v1/test_edge_container.py | 270 ++++---- 505 files changed, 8257 insertions(+), 6865 deletions(-) diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py index 54b9be0842e3..752f062de3b3 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataFusionClient).get_transport_class, type(DataFusionClient) - ) + get_transport_class = DataFusionClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py index 1adb5e2f23cc..1fb70174685f 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py @@ -688,7 +688,7 @@ def __init__( transport_init: Union[ Type[DataFusionTransport], Callable[..., DataFusionTransport] ] = ( - type(self).get_transport_class(transport) + DataFusionClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataFusionTransport], transport) ) diff --git a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json index 621909d85f63..1c5648e25de2 100644 --- a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json +++ b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-fusion", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py index 46e3afdc643a..c3738bf000c3 100644 --- a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py +++ b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py @@ -1268,22 +1268,23 @@ async def test_list_available_versions_async_use_cached_wrapped_rpc( ) # Replace cached 
wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_available_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_available_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_available_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1854,22 +1855,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2391,22 +2393,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2657,8 +2660,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2712,26 +2716,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3041,8 +3047,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3096,26 +3103,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3401,8 +3410,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -3456,26 +3466,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3777,8 +3789,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -3832,26 +3845,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py index 52ac68d02611..558c8aab67c5 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py index 52ac68d02611..558c8aab67c5 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
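Note: the regenerated async tests above all follow one pattern: the cached wrapped RPC is swapped for a mock.AsyncMock whose return_value is set to a plain mock.Mock (so awaiting the client method yields a non-awaitable result), and call_count is asserted across repeated calls to show the cached wrapper is reused. A minimal, self-contained sketch of just that assertion pattern; the wrapped_methods dict and the "moderate_text" key are stand-ins for the generated transport internals, not the real client:

import asyncio
from unittest import mock


async def demo():
    # Stand-in for client._client._transport._wrapped_methods in the generated tests.
    wrapped_methods = {}

    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # awaited result is a plain Mock, not another awaitable
    wrapped_methods["moderate_text"] = mock_rpc

    request = {}
    await wrapped_methods["moderate_text"](request)
    assert mock_rpc.call_count == 1

    # A second call goes through the same cached wrapper; only the call count grows.
    await wrapped_methods["moderate_text"](request)
    assert mock_rpc.call_count == 2


asyncio.run(demo())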
# -__version__ = "0.10.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py index b7c6f8cc25f8..76031483cd3d 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -257,10 +256,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoSuggestionServiceClient).get_transport_class, - type(AutoSuggestionServiceClient), - ) + get_transport_class = AutoSuggestionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py index a6da93e4f7b8..6de5b4c512ba 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py @@ -718,7 +718,7 @@ def __init__( Type[AutoSuggestionServiceTransport], Callable[..., AutoSuggestionServiceTransport], ] = ( - type(self).get_transport_class(transport) + AutoSuggestionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoSuggestionServiceTransport], transport) ) diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py index be6bf229721a..6ea78adb9c12 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(QuestionServiceClient).get_transport_class, type(QuestionServiceClient) - ) + get_transport_class = QuestionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py index 34323c3d1076..a1793ae50ed9 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py @@ -711,7 +711,7 @@ def __init__( transport_init: Union[ Type[QuestionServiceTransport], Callable[..., QuestionServiceTransport] ] = ( - type(self).get_transport_class(transport) + QuestionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., QuestionServiceTransport], transport) ) diff --git a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json index d104234fa78b..2325b770fe09 100644 --- a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json +++ b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-qna", - "version": "0.10.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py index e3de3b31049f..45c3f5ea6ca8 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py @@ -1347,22 +1347,23 @@ async def test_suggest_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_queries - ] = mock_object + ] = mock_rpc request = {} await client.suggest_queries(request) # Establish that the underlying gRPC stub method was called. 
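Note: the async_client.py hunks above replace functools.partial(type(Client).get_transport_class, type(Client)) with a plain Client.get_transport_class alias. A rough sketch of why the two spellings resolve to the same transport-registry lookup, using a toy metaclass in place of the generated one; _ClientMeta and ExampleClient are illustrative names, not the real GAPIC classes:

import functools


class _ClientMeta(type):
    # Toy stand-in for the transport registry the generated client metaclass keeps.
    _transport_registry = {"grpc": "GrpcTransport", "grpc_asyncio": "GrpcAsyncIOTransport"}

    def get_transport_class(cls, label=None):
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class ExampleClient(metaclass=_ClientMeta):
    pass


# Pre-1.18.5 generated code bound the metaclass method by hand:
old_style = functools.partial(
    type(ExampleClient).get_transport_class, type(ExampleClient)
)
# 1.18.5 simply aliases the attribute, which the metaclass already binds:
new_style = ExampleClient.get_transport_class

assert old_style("grpc") == new_style("grpc") == "GrpcTransport"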
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py index 8458160723db..6765274f125a 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py @@ -1325,22 +1325,23 @@ async def test_get_question_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_question - ] = mock_object + ] = mock_rpc request = {} await client.get_question(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_question(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1705,22 +1706,23 @@ async def test_create_question_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_question - ] = mock_object + ] = mock_rpc request = {} await client.create_question(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_question(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2101,22 +2103,23 @@ async def test_execute_question_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_question - ] = mock_object + ] = mock_rpc request = {} await client.execute_question(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_question(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2496,22 +2499,23 @@ async def test_get_user_feedback_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user_feedback - ] = mock_object + ] = mock_rpc request = {} await client.get_user_feedback(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user_feedback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2889,22 +2893,23 @@ async def test_update_user_feedback_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user_feedback - ] = mock_object + ] = mock_rpc request = {} await client.update_user_feedback(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user_feedback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py index cbc79b808afc..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py index cbc79b808afc..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py index bc785e00f39f..128a73fb72df 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LineageClient).get_transport_class, type(LineageClient) - ) + get_transport_class = LineageClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py index 3bb5b735b73f..349c60ba95f4 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py @@ -718,7 +718,7 @@ def __init__( transport_init: Union[ Type[LineageTransport], Callable[..., LineageTransport] ] = ( - type(self).get_transport_class(transport) + LineageClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LineageTransport], transport) ) diff --git a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json index 2156bebef95e..02d1172d6df8 100644 --- a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json +++ b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog-lineage", - "version": "0.3.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py index 31e8666c9ddd..3b3d72019372 100644 --- a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py +++ b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py @@ -1249,22 +1249,23 @@ async def test_process_open_lineage_run_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.process_open_lineage_run_event - ] = mock_object + ] = mock_rpc request = {} await client.process_open_lineage_run_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.process_open_lineage_run_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1679,22 +1680,23 @@ async def test_create_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_process - ] = mock_object + ] = mock_rpc request = {} await client.create_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2050,22 +2052,23 @@ async def test_update_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_process - ] = mock_object + ] = mock_rpc request = {} await client.update_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2425,22 +2428,23 @@ async def test_get_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_process - ] = mock_object + ] = mock_rpc request = {} await client.get_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2789,22 +2793,23 @@ async def test_list_processes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processes - ] = mock_object + ] = mock_rpc request = {} await client.list_processes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3291,8 +3296,9 @@ def test_delete_process_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_process(request) @@ -3346,26 +3352,28 @@ async def test_delete_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_process - ] = mock_object + ] = mock_rpc request = {} await client.delete_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3717,22 +3725,23 @@ async def test_create_run_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_run - ] = mock_object + ] = mock_rpc request = {} await client.create_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4091,22 +4100,23 @@ async def test_update_run_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_run - ] = mock_object + ] = mock_rpc request = {} await client.update_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4469,22 +4479,23 @@ async def test_get_run_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_run - ] = mock_object + ] = mock_rpc request = {} await client.get_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4833,22 +4844,23 @@ async def test_list_runs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5335,8 +5347,9 @@ def test_delete_run_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_run(request) @@ -5388,26 +5401,28 @@ async def test_delete_run_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_run - ] = mock_object + ] = mock_rpc request = {} await client.delete_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5767,22 +5782,23 @@ async def test_create_lineage_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lineage_event - ] = mock_object + ] = mock_rpc request = {} await client.create_lineage_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_lineage_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6161,22 +6177,23 @@ async def test_get_lineage_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lineage_event - ] = mock_object + ] = mock_rpc request = {} await client.get_lineage_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lineage_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6549,22 +6566,23 @@ async def test_list_lineage_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lineage_events - ] = mock_object + ] = mock_rpc request = {} await client.list_lineage_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lineage_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7130,22 +7148,23 @@ async def test_delete_lineage_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lineage_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_lineage_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_lineage_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7497,22 +7516,23 @@ async def test_search_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_links - ] = mock_object + ] = mock_rpc request = {} await client.search_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7988,22 +8008,23 @@ async def test_batch_search_link_processes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_search_link_processes - ] = mock_object + ] = mock_rpc request = {} await client.batch_search_link_processes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_search_link_processes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index 91a01ec63c59..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.20.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index 91a01ec63c59..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.20.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 11e88eef1af1..8563a22d9452 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataCatalogClient).get_transport_class, type(DataCatalogClient) - ) + get_transport_class = DataCatalogClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py index 82a91dfeb438..60d5ae521e3a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -797,7 +797,7 @@ def __init__( transport_init: Union[ Type[DataCatalogTransport], Callable[..., DataCatalogTransport] ] = ( - type(self).get_transport_class(transport) + DataCatalogClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataCatalogTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py index d058c3c6634f..7d163597edd4 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient) - ) + get_transport_class = PolicyTagManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py index b1f2e7a86ec0..61ccfadc83f7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -695,7 +695,7 @@ def __init__( Type[PolicyTagManagerTransport], Callable[..., PolicyTagManagerTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PolicyTagManagerTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py index a10cd8d0705a..749b175afc3b 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,10 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerSerializationClient).get_transport_class, - type(PolicyTagManagerSerializationClient), - ) + get_transport_class = PolicyTagManagerSerializationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py index c604038771ed..800d04876397 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -687,7 +687,7 @@ def __init__( Type[PolicyTagManagerSerializationTransport], Callable[..., PolicyTagManagerSerializationTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerSerializationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., PolicyTagManagerSerializationTransport], transport diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index 91a01ec63c59..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.20.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py index 72b520b9d2e8..7597eda537a5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,9 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataCatalogClient).get_transport_class, type(DataCatalogClient) - ) + get_transport_class = DataCatalogClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index f9f1045b6b8e..1760cad85812 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -794,7 +794,7 @@ def __init__( transport_init: Union[ Type[DataCatalogTransport], Callable[..., DataCatalogTransport] ] = ( - type(self).get_transport_class(transport) + DataCatalogClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataCatalogTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py index f7bb96f65d78..f0edd007b8ca 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient) - ) + get_transport_class = PolicyTagManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index 485ecfc41a77..420376ff3822 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -691,7 +691,7 @@ def __init__( Type[PolicyTagManagerTransport], Callable[..., PolicyTagManagerTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PolicyTagManagerTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py index c29575ad8158..ed73043cb897 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py @@ -14,7 
+14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,10 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerSerializationClient).get_transport_class, - type(PolicyTagManagerSerializationClient), - ) + get_transport_class = PolicyTagManagerSerializationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index 3818bafca657..179bf24456d1 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -684,7 +684,7 @@ def __init__( Type[PolicyTagManagerSerializationTransport], Callable[..., PolicyTagManagerSerializationTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerSerializationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., PolicyTagManagerSerializationTransport], transport diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index ebefaaae62a8..1659a652e62c 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.20.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index d906aa94b097..14e0e75feb01 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.20.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index d6eda9c1da89..c6db81642361 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -1261,22 +1261,23 @@ async def test_search_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_catalog - ] = mock_object + ] = mock_rpc request = {} await client.search_catalog(request) # Establish 
that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1801,22 +1802,23 @@ async def test_create_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2205,22 +2207,23 @@ async def test_get_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2597,22 +2600,23 @@ async def test_update_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2990,22 +2994,23 @@ async def test_delete_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3367,22 +3372,23 @@ async def test_list_entry_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3959,22 +3965,23 @@ async def test_create_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4357,22 +4364,23 @@ async def test_update_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4728,22 +4736,23 @@ async def test_delete_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5095,22 +5104,23 @@ async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5485,22 +5495,23 @@ async def test_lookup_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_entry - ] = mock_object + ] = mock_rpc request = {} await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5716,22 +5727,23 @@ async def test_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6287,22 +6299,23 @@ async def test_modify_entry_overview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.modify_entry_overview - ] = mock_object + ] = mock_rpc request = {} await client.modify_entry_overview(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.modify_entry_overview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6583,22 +6596,23 @@ async def test_modify_entry_contacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.modify_entry_contacts - ] = mock_object + ] = mock_rpc request = {} await client.modify_entry_contacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.modify_entry_contacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6888,22 +6902,23 @@ async def test_create_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7290,22 +7305,23 @@ async def test_get_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7668,22 +7684,23 @@ async def test_update_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8057,22 +8074,23 @@ async def test_delete_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8459,22 +8477,23 @@ async def test_create_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8887,22 +8906,23 @@ async def test_update_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9317,22 +9337,23 @@ async def test_rename_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9737,22 +9758,23 @@ async def test_rename_tag_template_field_enum_value_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field_enum_value - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field_enum_value(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field_enum_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10136,22 +10158,23 @@ async def test_delete_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10517,22 +10540,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10892,22 +10916,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11257,22 +11282,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11612,22 +11638,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12116,8 +12143,9 @@ def test_reconcile_tags_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reconcile_tags(request) @@ -12171,26 +12199,28 @@ async def test_reconcile_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reconcile_tags - ] = mock_object + ] = mock_rpc request = {} await client.reconcile_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reconcile_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12447,22 +12477,23 @@ async def test_star_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.star_entry - ] = mock_object + ] = mock_rpc request = {} await client.star_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.star_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12803,22 +12834,23 @@ async def test_unstar_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unstar_entry - ] = mock_object + ] = mock_rpc request = {} await client.unstar_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.unstar_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13167,22 +13199,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13550,22 +13583,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13941,22 +13975,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14197,8 +14232,9 @@ def test_import_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_entries(request) @@ -14252,26 +14288,28 @@ async def test_import_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_entries - ] = mock_object + ] = mock_rpc request = {} await client.import_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index b9901349b229..df706a04c106 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -1298,22 +1298,23 @@ async def test_create_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,22 +1678,23 @@ async def test_delete_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2046,22 +2048,23 @@ async def test_update_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2426,22 +2429,23 @@ async def test_list_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3002,22 +3006,23 @@ async def test_get_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3400,22 +3405,23 @@ async def test_create_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3796,22 +3802,23 @@ async def test_delete_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4180,22 +4187,23 @@ async def test_update_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4567,22 +4575,23 @@ async def test_list_policy_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_policy_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_policy_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5137,22 +5146,23 @@ async def test_get_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5512,22 +5522,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5814,22 +5825,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6126,22 +6138,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py index 7394b316d30b..34366fb4e79e 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -1352,22 +1352,23 @@ async def test_replace_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.replace_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1653,23 @@ async def test_import_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.import_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1943,22 +1945,23 @@ async def test_export_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 27cccbb831ad..92ffd3ce8379 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -1248,22 +1248,23 @@ async def test_search_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_catalog - ] = mock_object + ] = mock_rpc request = {} await client.search_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1788,22 +1789,23 @@ async def test_create_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2200,22 +2202,23 @@ async def test_update_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2594,22 +2597,23 @@ async def test_get_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2977,22 +2981,23 @@ async def test_delete_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3354,22 +3359,23 @@ async def test_list_entry_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3943,22 +3949,23 @@ async def test_create_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4336,22 +4343,23 @@ async def test_update_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4705,22 +4713,23 @@ async def test_delete_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5069,22 +5078,23 @@ async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5448,22 +5458,23 @@ async def test_lookup_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_entry - ] = mock_object + ] = mock_rpc request = {} await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5677,22 +5688,23 @@ async def test_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6258,22 +6270,23 @@ async def test_create_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6666,22 +6679,23 @@ async def test_get_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7050,22 +7064,23 @@ async def test_update_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7442,22 +7457,23 @@ async def test_delete_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7844,22 +7860,23 @@ async def test_create_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8272,22 +8289,23 @@ async def test_update_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8702,22 +8720,23 @@ async def test_rename_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9122,22 +9141,23 @@ async def test_rename_tag_template_field_enum_value_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field_enum_value - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field_enum_value(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field_enum_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9521,22 +9541,23 @@ async def test_delete_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9902,22 +9923,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10277,22 +10299,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10642,22 +10665,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10997,22 +11021,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11558,22 +11583,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11941,22 +11967,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12332,22 +12359,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index c922c5b886c5..3a1e0fa02567 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -1298,22 +1298,23 @@ async def test_create_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,22 +1678,23 @@ async def test_delete_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2046,22 +2048,23 @@ async def test_update_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2426,22 +2429,23 @@ async def test_list_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3002,22 +3006,23 @@ async def test_get_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3400,22 +3405,23 @@ async def test_create_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3796,22 +3802,23 @@ async def test_delete_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4180,22 +4187,23 @@ async def test_update_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4567,22 +4575,23 @@ async def test_list_policy_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_policy_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_policy_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5137,22 +5146,23 @@ async def test_get_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5512,22 +5522,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5814,22 +5825,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6126,22 +6138,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index c4d7a3249103..3af441095898 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -1336,22 +1336,23 @@ async def test_import_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.import_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1627,22 +1628,23 @@ async def test_export_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py index f94a6b1a4aec..558c8aab67c5 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.8.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py index f94a6b1a4aec..558c8aab67c5 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.8.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py index fa16fb3eef69..a24cc0eb8e9e 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
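Note on the recurring test change in the hunks above: across these `*_use_cached_wrapped_rpc` tests the mock is renamed from `mock_object` to `mock_rpc` and gains an explicit `mock_rpc.return_value = mock.Mock()`. Awaiting a call to an `AsyncMock` yields its `return_value`, so pinning that to a plain `Mock` keeps the fake response behaving like an ordinary object rather than yet another awaitable mock. The sketch below is a minimal, self-contained illustration of the test pattern only; `FakeTransport`, `FakeAsyncClient`, and `some_rpc` are hypothetical stand-ins, not code from this patch, and the `wrapper_fn` assertion from the real tests is omitted.

```python
import asyncio
from unittest import mock


class FakeTransport:
    """Hypothetical stand-in for a generated GAPIC transport."""

    def __init__(self):
        # The cache is keyed by the stub method object itself.
        self.some_rpc = object()
        self._wrapped_methods = {}


class FakeAsyncClient:
    """Hypothetical stand-in for a generated async client."""

    def __init__(self):
        self._transport = FakeTransport()

    async def some_rpc(self, request):
        transport = self._transport
        # First call wraps the RPC lazily and caches it; later calls reuse it.
        if transport.some_rpc not in transport._wrapped_methods:
            transport._wrapped_methods[transport.some_rpc] = mock.AsyncMock()
        return await transport._wrapped_methods[transport.some_rpc](request)


async def main():
    client = FakeAsyncClient()
    await client.some_rpc({})  # populate the cache once

    # Replace the cached wrapped function with a mock, as the tests do.
    mock_rpc = mock.AsyncMock()
    # Awaiting mock_rpc(...) yields mock_rpc.return_value; a plain Mock keeps
    # the fake response synchronous instead of being another awaitable mock.
    mock_rpc.return_value = mock.Mock()
    client._transport._wrapped_methods[client._transport.some_rpc] = mock_rpc

    await client.some_rpc({})
    assert mock_rpc.call_count == 1

    # A second call must reuse the same cached entry, not re-wrap the RPC.
    await client.some_rpc({})
    assert mock_rpc.call_count == 2


asyncio.run(main())
```

The two assertions mirror the generated tests: after the swap, the first call must hit the injected mock exactly once, and the second call must land on the same cached entry rather than trigger creation of a new wrapper.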
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,10 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FlexTemplatesServiceClient).get_transport_class, - type(FlexTemplatesServiceClient), - ) + get_transport_class = FlexTemplatesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py index efdca64928f3..71e049ca39e5 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -644,7 +644,7 @@ def __init__( Type[FlexTemplatesServiceTransport], Callable[..., FlexTemplatesServiceTransport], ] = ( - type(self).get_transport_class(transport) + FlexTemplatesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FlexTemplatesServiceTransport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py index cbf7b23e83a6..f811d534e78e 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(JobsV1Beta3Client).get_transport_class, type(JobsV1Beta3Client) - ) + get_transport_class = JobsV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py index b0f307160d19..1f3019cd8a89 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[JobsV1Beta3Transport], Callable[..., JobsV1Beta3Transport] ] = ( - type(self).get_transport_class(transport) + JobsV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., JobsV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py index 6ddf846a4aa5..0b9299d50f5c 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MessagesV1Beta3Client).get_transport_class, type(MessagesV1Beta3Client) - ) + get_transport_class = MessagesV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py index b54eb92d3793..c32452797c5a 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[MessagesV1Beta3Transport], Callable[..., MessagesV1Beta3Transport] ] = ( - type(self).get_transport_class(transport) + MessagesV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MessagesV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py index 57e3e5006763..a1686a129fb0 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the 
License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetricsV1Beta3Client).get_transport_class, type(MetricsV1Beta3Client) - ) + get_transport_class = MetricsV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py index d86b7b9fa5a7..9c6570dea285 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[MetricsV1Beta3Transport], Callable[..., MetricsV1Beta3Transport] ] = ( - type(self).get_transport_class(transport) + MetricsV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricsV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py index 9fac47b40297..f46ceaa9029f 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SnapshotsV1Beta3Client).get_transport_class, type(SnapshotsV1Beta3Client) - ) + get_transport_class = SnapshotsV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py index 0e67ac28c812..568607068769 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -645,7 +645,7 @@ def __init__( Type[SnapshotsV1Beta3Transport], Callable[..., SnapshotsV1Beta3Transport], ] = ( - type(self).get_transport_class(transport) + SnapshotsV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SnapshotsV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py index 6ff37d5c83e1..bc4e47b5c0b8 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TemplatesServiceClient).get_transport_class, type(TemplatesServiceClient) - ) + get_transport_class = TemplatesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py index bc7a34b75f59..53466a958396 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -645,7 +645,7 @@ def __init__( Type[TemplatesServiceTransport], Callable[..., TemplatesServiceTransport], ] = ( - type(self).get_transport_class(transport) + TemplatesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TemplatesServiceTransport], transport) ) diff --git a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json index 8c52d7b6d17e..07ae5fac5096 100644 --- a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json +++ b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataflow-client", - "version": "0.8.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index d454d148b542..aa5d43d912bc 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -1362,22 +1362,23 @@ async def test_launch_flex_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.launch_flex_template - ] = mock_object + ] = mock_rpc request = {} await client.launch_flex_template(request) # Establish that the underlying gRPC stub method was called. 
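Note on the `get_transport_class` hunks above: the async clients previously exposed the attribute as `functools.partial(type(Client).get_transport_class, type(Client))`, and this patch replaces that with a direct reference to `Client.get_transport_class`; the companion `client.py` hunks likewise name the concrete client class instead of calling `type(self)`. Because `get_transport_class` is provided through the client's metaclass, both spellings end up consulting the same transport registry. The snippet below is a hedged sketch of that equivalence using stand-in names (`FlexTemplatesServiceClientMeta`, a string-valued registry), not the generated implementation.

```python
from collections import OrderedDict
import functools


class FlexTemplatesServiceClientMeta(type):
    # The registry lives on the metaclass, so it is reachable both as
    # FlexTemplatesServiceClientMeta._transport_registry and as
    # FlexTemplatesServiceClient._transport_registry.
    _transport_registry = OrderedDict(grpc="GrpcTransport", rest="RestTransport")

    def get_transport_class(cls, label=None):
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class FlexTemplatesServiceClient(metaclass=FlexTemplatesServiceClientMeta):
    pass


# Old generated spelling: fetch the function off the metaclass and pre-bind it.
old_style = functools.partial(
    type(FlexTemplatesServiceClient).get_transport_class,
    type(FlexTemplatesServiceClient),
)

# New generated spelling: the same method, bound through the client class.
new_style = FlexTemplatesServiceClient.get_transport_class

assert old_style("grpc") == new_style("grpc") == "GrpcTransport"
```

Naming the client class directly also decouples the lookup from `type(self)`, so a subclassed client no longer changes which registry is consulted.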
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.launch_flex_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index 72e99e596064..88582acc1b51 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -1294,22 +1294,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1994,22 +1996,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2306,22 +2309,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2805,22 +2809,23 @@ async def test_aggregated_list_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.aggregated_list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.aggregated_list_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.aggregated_list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3305,22 +3310,23 @@ async def test_check_active_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_active_jobs - ] = mock_object + ] = mock_rpc request = {} await client.check_active_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_active_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,22 +3558,23 @@ async def test_snapshot_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.snapshot_job - ] = mock_object + ] = mock_rpc request = {} await client.snapshot_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.snapshot_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index 2c7c68cad73c..7b5bfb96ec0b 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -1327,22 +1327,23 @@ async def test_list_job_messages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_job_messages - ] = mock_object + ] = mock_rpc request = {} await client.list_job_messages(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_job_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index ccf43558e7b7..102d39f0cb8d 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -1288,22 +1288,23 @@ async def test_get_job_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job_metrics - ] = mock_object + ] = mock_rpc request = {} await client.get_job_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1586,22 +1587,23 @@ async def test_get_job_execution_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job_execution_details - ] = mock_object + ] = mock_rpc request = {} await client.get_job_execution_details(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job_execution_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2109,22 +2111,23 @@ async def test_get_stage_execution_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stage_execution_details - ] = mock_object + ] = mock_rpc request = {} await client.get_stage_execution_details(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stage_execution_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index 2e41686a5f77..a0a319bac934 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -1339,22 +1339,23 @@ async def test_get_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1634,22 +1635,23 @@ async def test_delete_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1916,22 +1918,23 @@ async def test_list_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index 9b8d2541eee7..071e04cdf105 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -1375,22 +1375,23 @@ async def test_create_job_from_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job_from_template - ] = mock_object + ] = mock_rpc request = {} await client.create_job_from_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job_from_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1688,22 +1689,23 @@ async def test_launch_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.launch_template - ] = mock_object + ] = mock_rpc request = {} await client.launch_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.launch_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1975,23 @@ async def test_get_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_template - ] = mock_object + ] = mock_rpc request = {} await client.get_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py index 1912daa165db..2e5979df8313 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataformClient).get_transport_class, type(DataformClient) - ) + get_transport_class = DataformClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py index 161196eab69d..bcf592a2a77a 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py @@ -807,7 +807,7 @@ def __init__( transport_init: Union[ Type[DataformTransport], Callable[..., DataformTransport] ] = ( - type(self).get_transport_class(transport) + DataformClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataformTransport], transport) ) diff --git a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json index 9a805193694a..d78e2036cf34 100644 --- a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json +++ b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataform", - "version": "0.5.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py index c79d3d689f3f..84f9bf85c48f 100644 --- a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py +++ b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py @@ -1253,22 +1253,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = 
mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1846,22 +1847,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2244,22 +2246,23 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2666,22 +2669,23 @@ async def test_update_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_repository - ] = mock_object + ] = mock_rpc request = {} await client.update_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3060,22 +3064,23 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3433,22 +3438,23 @@ async def test_commit_repository_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit_repository_changes - ] = mock_object + ] = mock_rpc request = {} await client.commit_repository_changes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.commit_repository_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3731,22 +3737,23 @@ async def test_read_repository_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.read_repository_file - ] = mock_object + ] = mock_rpc request = {} await client.read_repository_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.read_repository_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4038,22 +4045,23 @@ async def test_query_repository_directory_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_repository_directory_contents - ] = mock_object + ] = mock_rpc request = {} await client.query_repository_directory_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_repository_directory_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4546,22 +4554,23 @@ async def test_fetch_repository_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_repository_history - ] = mock_object + ] = mock_rpc request = {} await client.fetch_repository_history(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_repository_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5054,22 +5063,23 @@ async def test_compute_repository_access_token_status_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_repository_access_token_status - ] = mock_object + ] = mock_rpc request = {} await client.compute_repository_access_token_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_repository_access_token_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5359,22 +5369,23 @@ async def test_fetch_remote_branches_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_remote_branches - ] = mock_object + ] = mock_rpc request = {} await client.fetch_remote_branches(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_remote_branches(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5656,22 +5667,23 @@ async def test_list_workspaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workspaces - ] = mock_object + ] = mock_rpc request = {} await client.list_workspaces(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workspaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6216,22 +6228,23 @@ async def test_get_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workspace - ] = mock_object + ] = mock_rpc request = {} await client.get_workspace(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6580,22 +6593,23 @@ async def test_create_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workspace - ] = mock_object + ] = mock_rpc request = {} await client.create_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6955,22 +6969,23 @@ async def test_delete_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workspace - ] = mock_object + ] = mock_rpc request = {} await client.delete_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7317,22 +7332,23 @@ async def test_install_npm_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.install_npm_packages - ] = mock_object + ] = mock_rpc request = {} await client.install_npm_packages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.install_npm_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7599,22 +7615,23 @@ async def test_pull_git_commits_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pull_git_commits - ] = mock_object + ] = mock_rpc request = {} await client.pull_git_commits(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pull_git_commits(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7871,22 +7888,23 @@ async def test_push_git_commits_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.push_git_commits - ] = mock_object + ] = mock_rpc request = {} await client.push_git_commits(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.push_git_commits(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8154,22 +8172,23 @@ async def test_fetch_file_git_statuses_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_file_git_statuses - ] = mock_object + ] = mock_rpc request = {} await client.fetch_file_git_statuses(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_file_git_statuses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8457,22 +8476,23 @@ async def test_fetch_git_ahead_behind_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_ahead_behind - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_ahead_behind(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_ahead_behind(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8755,22 +8775,23 @@ async def test_commit_workspace_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit_workspace_changes - ] = mock_object + ] = mock_rpc request = {} await client.commit_workspace_changes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.commit_workspace_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9042,22 +9063,23 @@ async def test_reset_workspace_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_workspace_changes - ] = mock_object + ] = mock_rpc request = {} await client.reset_workspace_changes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reset_workspace_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9325,22 +9347,23 @@ async def test_fetch_file_diff_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_file_diff - ] = mock_object + ] = mock_rpc request = {} await client.fetch_file_diff(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_file_diff(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9624,22 +9647,23 @@ async def test_query_directory_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_directory_contents - ] = mock_object + ] = mock_rpc request = {} await client.query_directory_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_directory_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10113,22 +10137,23 @@ async def test_make_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.make_directory - ] = mock_object + ] = mock_rpc request = {} await client.make_directory(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.make_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10389,22 +10414,23 @@ async def test_remove_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_directory - ] = mock_object + ] = mock_rpc request = {} await client.remove_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10663,22 +10689,23 @@ async def test_move_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_directory - ] = mock_object + ] = mock_rpc request = {} await client.move_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10942,22 +10969,23 @@ async def test_read_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.read_file - ] = mock_object + ] = mock_rpc request = {} await client.read_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.read_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11219,22 +11247,23 @@ async def test_remove_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_file - ] = mock_object + ] = mock_rpc request = {} await client.remove_file(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11491,22 +11520,23 @@ async def test_move_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_file - ] = mock_object + ] = mock_rpc request = {} await client.move_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11765,22 +11795,23 @@ async def test_write_file_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_file - ] = mock_object + ] = mock_rpc request = {} await client.write_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12061,22 +12092,23 @@ async def test_list_release_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_release_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_release_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_release_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12663,22 +12695,23 @@ async def test_get_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_release_config - ] = mock_object + ] = mock_rpc request = {} await client.get_release_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13072,22 +13105,23 @@ async def test_create_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_release_config - ] = mock_object + ] = mock_rpc request = {} await client.create_release_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13495,22 +13529,23 @@ async def test_update_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_release_config - ] = mock_object + ] = mock_rpc request = {} await client.update_release_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13893,22 +13928,23 @@ async def test_delete_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_release_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_release_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14276,22 +14312,23 @@ async def test_list_compilation_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_compilation_results - ] = mock_object + ] = mock_rpc request = {} await client.list_compilation_results(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_compilation_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14876,22 +14913,23 @@ async def test_get_compilation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_compilation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_compilation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_compilation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15274,22 +15312,23 @@ async def test_create_compilation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_compilation_result - ] = mock_object + ] = mock_rpc request = {} await client.create_compilation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_compilation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15680,22 +15719,23 @@ async def test_query_compilation_result_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_compilation_result_actions - ] = mock_object + ] = mock_rpc request = {} await client.query_compilation_result_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_compilation_result_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16191,22 +16231,23 @@ async def test_list_workflow_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflow_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_workflow_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflow_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16790,22 +16831,23 @@ async def test_get_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17194,22 +17236,23 @@ async def test_create_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17612,22 +17655,23 @@ async def test_update_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.update_workflow_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18008,22 +18052,23 @@ async def test_delete_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18395,22 +18440,23 @@ async def test_list_workflow_invocations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflow_invocations - ] = mock_object + ] = mock_rpc request = {} await client.list_workflow_invocations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflow_invocations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18993,22 +19039,23 @@ async def test_get_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19386,22 +19433,23 @@ async def test_create_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19779,22 +19827,23 @@ async def test_delete_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20151,22 +20200,23 @@ async def test_cancel_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.cancel_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20448,22 +20498,23 @@ async def test_query_workflow_invocation_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_workflow_invocation_actions - ] = mock_object + ] = mock_rpc request = {} await client.query_workflow_invocation_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_workflow_invocation_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py index 977baa52a19d..9d2ea7440590 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -234,10 +233,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataLabelingServiceClient).get_transport_class, - type(DataLabelingServiceClient), - ) + get_transport_class = DataLabelingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py index 781f4c2b9b00..1cd66859e9ed 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py @@ -825,7 +825,7 @@ def __init__( Type[DataLabelingServiceTransport], Callable[..., DataLabelingServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataLabelingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataLabelingServiceTransport], transport) ) diff --git a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json index d4bb1f70fc54..4a6f501c7672 100644 --- a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json +++ b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datalabeling", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py index a1bebd8e8d76..c495e9e496d5 100644 --- a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py +++ b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py @@ -1340,22 +1340,23 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub 
method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1731,22 +1732,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2104,22 +2106,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2666,22 +2669,23 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2968,8 +2972,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -3023,26 +3028,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3361,8 +3368,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -3416,26 +3424,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3824,22 +3834,23 @@ async def test_get_data_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_item - ] = mock_object + ] = mock_rpc request = {} await client.get_data_item(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4189,22 +4200,23 @@ async def test_list_data_items_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_items - ] = mock_object + ] = mock_rpc request = {} await client.list_data_items(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_items(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4795,22 +4807,23 @@ async def test_get_annotated_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotated_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_annotated_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotated_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5204,22 +5217,23 @@ async def test_list_annotated_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_annotated_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_annotated_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_annotated_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5797,22 +5811,23 @@ async def test_delete_annotated_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_annotated_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_annotated_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_annotated_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6023,8 +6038,9 @@ def test_label_image_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.label_image(request) @@ -6078,26 +6094,28 @@ async def test_label_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.label_image - ] = mock_object + ] = mock_rpc request = {} await client.label_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.label_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6420,8 +6438,9 @@ def test_label_video_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.label_video(request) @@ -6475,26 +6494,28 @@ async def test_label_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.label_video - ] = mock_object + ] = mock_rpc request = {} await client.label_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.label_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6817,8 +6838,9 @@ def test_label_text_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.label_text(request) @@ -6870,26 +6892,28 @@ async def test_label_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.label_text - ] = mock_object + ] = mock_rpc request = {} await client.label_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.label_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7269,22 +7293,23 @@ async def test_get_example_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_example - ] = mock_object + ] = mock_rpc request = {} await client.get_example(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_example(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7644,22 +7669,23 @@ async def test_list_examples_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_examples - ] = mock_object + ] = mock_rpc request = {} await client.list_examples(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_examples(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8235,22 +8261,23 @@ async def test_create_annotation_spec_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_annotation_spec_set - ] = mock_object + ] = mock_rpc request = {} await client.create_annotation_spec_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_annotation_spec_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8656,22 +8683,23 @@ async def test_get_annotation_spec_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation_spec_set - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation_spec_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation_spec_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9054,22 +9082,23 @@ async def test_list_annotation_spec_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_annotation_spec_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_annotation_spec_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_annotation_spec_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9653,22 +9682,23 @@ async def test_delete_annotation_spec_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_annotation_spec_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_annotation_spec_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_annotation_spec_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9973,8 +10003,9 @@ def test_create_instruction_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instruction(request) @@ -10030,26 +10061,28 @@ async def test_create_instruction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instruction - ] = mock_object + ] = mock_rpc request = {} await client.create_instruction(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instruction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10428,22 +10461,23 @@ async def test_get_instruction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instruction - ] = mock_object + ] = mock_rpc request = {} await client.get_instruction(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instruction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10815,22 +10849,23 @@ async def test_list_instructions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instructions - ] = mock_object + ] = mock_rpc request = {} await client.list_instructions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instructions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11407,22 +11442,23 @@ async def test_delete_instruction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instruction - ] = mock_object + ] = mock_rpc request = {} await client.delete_instruction(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_instruction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11782,22 +11818,23 @@ async def test_get_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12170,22 +12207,23 @@ async def test_search_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.search_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12772,22 +12810,23 @@ async def test_search_example_comparisons_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_example_comparisons - ] = mock_object + ] = mock_rpc request = {} await client.search_example_comparisons(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_example_comparisons(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13394,22 +13433,23 @@ async def test_create_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.create_evaluation_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13818,22 +13858,23 @@ async def test_update_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.update_evaluation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14245,22 +14286,23 @@ async def test_get_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14637,22 +14679,23 @@ async def test_pause_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.pause_evaluation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15009,22 +15052,23 @@ async def test_resume_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.resume_evaluation_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15381,22 +15425,23 @@ async def test_delete_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_evaluation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15763,22 +15808,23 @@ async def test_list_evaluation_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluation_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluation_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluation_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index f7850c8d0049..558c8aab67c5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index f7850c8d0049..558c8aab67c5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index f1503d82d419..c7055186de2c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CatalogServiceClient).get_transport_class, type(CatalogServiceClient) - ) + get_transport_class = CatalogServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 84808233ad3d..a0ca684d5d8f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -746,7 +746,7 @@ def __init__( transport_init: Union[ Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport] ] = ( - type(self).get_transport_class(transport) + CatalogServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CatalogServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py index 606198cb6e02..3742d93956a9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContentServiceClient).get_transport_class, type(ContentServiceClient) - ) + get_transport_class = ContentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index a497d4df01bc..b8342f36ff08 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -691,7 +691,7 @@ def __init__( transport_init: Union[ Type[ContentServiceTransport], Callable[..., ContentServiceTransport] ] = ( - type(self).get_transport_class(transport) + ContentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContentServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index fb96abb79333..9a85748cbb34 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
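[editor's note] The async_client hunks above drop the functools.partial indirection around get_transport_class in favour of referencing the client classmethod directly, and client.py now calls it on the concrete *Client class rather than type(self). A rough, self-contained sketch of why the two spellings resolve the same transport — the metaclass and registry below are invented stand-ins, not the generated GAPIC code:

import functools


class _ClientMeta(type):
    # Invented registry; the real generated clients keep a similar mapping.
    _transport_registry = {"grpc": "GrpcTransport", "rest": "RestTransport"}

    def get_transport_class(cls, label=None):
        # Fall back to the first registered transport when no label is given.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class Client(metaclass=_ClientMeta):
    pass


old_style = functools.partial(type(Client).get_transport_class, type(Client))
new_style = Client.get_transport_class

assert old_style("rest") == new_style("rest") == "RestTransport"
assert old_style() == new_style() == "GrpcTransport"

Either spelling ends up consulting the same registry; the direct reference simply avoids binding the metaclass by hand.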
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,9 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataScanServiceClient).get_transport_class, type(DataScanServiceClient) - ) + get_transport_class = DataScanServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index 608a8156289f..492ffa36b496 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -731,7 +731,7 @@ def __init__( transport_init: Union[ Type[DataScanServiceTransport], Callable[..., DataScanServiceTransport] ] = ( - type(self).get_transport_class(transport) + DataScanServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataScanServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py index 5781986ed5ac..d50c36dde112 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,10 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataTaxonomyServiceClient).get_transport_class, - type(DataTaxonomyServiceClient), - ) + get_transport_class = DataTaxonomyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 028c44d5f445..a72997a74a85 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -724,7 +724,7 @@ def __init__( Type[DataTaxonomyServiceTransport], Callable[..., DataTaxonomyServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataTaxonomyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataTaxonomyServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py index b66fbee8fb81..4424693fa6a9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataplexServiceClient).get_transport_class, type(DataplexServiceClient) - ) + get_transport_class = DataplexServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index 8b07ba1d8f33..ef826d0cd2ec 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -854,7 +854,7 @@ def __init__( transport_init: Union[ Type[DataplexServiceTransport], Callable[..., DataplexServiceTransport] ] = ( - type(self).get_transport_class(transport) + DataplexServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataplexServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py index 331688748e05..4c1e8f2f6aee 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetadataServiceClient).get_transport_class, type(MetadataServiceClient) - ) + get_transport_class = MetadataServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py index 09ce53a07792..9598810211da 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -726,7 +726,7 @@ def __init__( transport_init: Union[ Type[MetadataServiceTransport], Callable[..., MetadataServiceTransport] ] = ( - type(self).get_transport_class(transport) + MetadataServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetadataServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index fe234646514f..d4535bf4114d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -1664,8 +1664,8 @@ class ListEntriesRequest(proto.Message): to be provided. 
Example filter expressions: "entry_source.display_name=AnExampleDisplayName" "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" - `entry_type=projects/example-project/locations/us/entryTypes/a* - OR entry_type=projects/another-project/locations/*` "NOT + "entry_type=projects/example-project/locations/us/entryTypes/a* + OR entry_type=projects/another-project/locations/*" "NOT entry_source.display_name=AnotherExampleDisplayName". """ diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index 257527a1bb61..61adee5a1be5 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "2.2.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index 38df78f1764a..78869ffa4363 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -1230,8 +1230,9 @@ def test_create_entry_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_entry_type(request) @@ -1287,26 +1288,28 @@ async def test_create_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1630,8 +1633,9 @@ def test_update_entry_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_entry_type(request) @@ -1687,26 +1691,28 @@ async def test_update_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2026,8 +2032,9 @@ def test_delete_entry_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_entry_type(request) @@ -2083,26 +2090,28 @@ async def test_delete_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2469,22 +2478,23 @@ async def test_list_entry_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_types - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_types(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3050,22 +3060,23 @@ async def test_get_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3380,8 +3391,9 @@ def test_create_aspect_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_aspect_type(request) @@ -3437,26 +3449,28 @@ async def test_create_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.create_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3782,8 +3796,9 @@ def test_update_aspect_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_aspect_type(request) @@ -3839,26 +3854,28 @@ async def test_update_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.update_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4180,8 +4197,9 @@ def test_delete_aspect_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_aspect_type(request) @@ -4237,26 +4255,28 @@ async def test_delete_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4631,22 +4651,23 @@ async def test_list_aspect_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_aspect_types - ] = mock_object + ] = mock_rpc request = {} await client.list_aspect_types(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_aspect_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5224,22 +5245,23 @@ async def test_get_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.get_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5550,8 +5572,9 @@ def test_create_entry_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_entry_group(request) @@ -5607,26 +5630,28 @@ async def test_create_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5952,8 +5977,9 @@ def test_update_entry_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_entry_group(request) @@ -6009,26 +6035,28 @@ async def test_update_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6350,8 +6378,9 @@ def test_delete_entry_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_entry_group(request) @@ -6407,26 +6436,28 @@ async def test_delete_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6801,22 +6832,23 @@ async def test_list_entry_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7394,22 +7426,23 @@ async def test_get_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7775,22 +7808,23 @@ async def test_create_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8166,22 +8200,23 @@ async def test_update_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8551,22 +8586,23 @@ async def test_delete_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8921,22 +8957,23 @@ async def test_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9486,22 +9523,23 @@ async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9863,22 +9901,23 @@ async def test_lookup_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_entry - ] = mock_object + ] = mock_rpc request = {} await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10163,22 +10202,23 @@ async def test_search_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_entries - ] = mock_object + ] = mock_rpc request = {} await client.search_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py index 35438a7efa20..c385cbb88bea 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -1278,22 +1278,23 @@ async def test_create_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_content - ] = mock_object + ] = mock_rpc request = {} await client.create_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1660,22 +1661,23 @@ async def test_update_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_content - ] = mock_object + ] = mock_rpc request = {} await client.update_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2029,22 +2031,23 @@ async def test_delete_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_content - ] = mock_object + ] = mock_rpc request = {} await client.delete_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2394,22 +2397,23 @@ async def test_get_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_content - ] = mock_object + ] = mock_rpc request = {} await client.get_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2763,22 +2767,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3145,22 +3150,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3457,22 +3463,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3769,22 +3776,23 @@ async def test_list_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_content - ] = mock_object + ] = mock_rpc request = {} await client.list_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index c4c810e873dc..0aa24fc84248 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -1244,8 +1244,9 @@ def test_create_data_scan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_scan(request) @@ -1299,26 +1300,28 @@ async def test_create_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.create_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1626,8 +1629,9 @@ def test_update_data_scan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_scan(request) @@ -1681,26 +1685,28 @@ async def test_update_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.update_data_scan(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2002,8 +2008,9 @@ def test_delete_data_scan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_scan(request) @@ -2057,26 +2064,28 @@ async def test_delete_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2437,22 +2446,23 @@ async def test_get_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.get_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2816,22 +2826,23 @@ async def test_list_data_scans_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_scans - ] = mock_object + ] = mock_rpc request = {} await client.list_data_scans(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_scans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3371,22 +3382,23 @@ async def test_run_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.run_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3754,22 +3766,23 @@ async def test_get_data_scan_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_scan_job - ] = mock_object + ] = mock_rpc request = {} await client.get_data_scan_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_scan_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4165,23 @@ async def test_list_data_scan_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_scan_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_data_scan_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_scan_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4736,22 +4750,23 @@ async def test_generate_data_quality_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_data_quality_rules - ] = mock_object + ] = mock_rpc request = {} await client.generate_data_quality_rules(request) # Establish that the underlying gRPC stub method was called. 
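The reworded comments in the operation-method tests describe a lazily built, cached operations client: wrapper_fn runs once on the first LRO call and is not consulted again. A minimal sketch of that caching behaviour, assuming a hypothetical FakeOperationsTransport (the real transports build a genuine operations client rather than a bare object()):

from unittest import mock


class FakeOperationsTransport:
    """Hypothetical stand-in: builds its operations client once, via wrapper_fn."""

    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        if self._operations_client is None:
            # Runs only on the first LRO call; cached afterwards.
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(return_value=object())
transport = FakeOperationsTransport(wrapper_fn)

transport.operations_client  # first call: factory runs
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
transport.operations_client  # second call: cached instance reused
assert wrapper_fn.call_count == 0

That is why the generated tests reset wrapper_fn after the first RPC and then assert call_count == 0: the second call must reuse the cached client rather than rebuild it.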
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_data_quality_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py index f98d4cbcd6c3..25e617f52308 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -1274,8 +1274,9 @@ def test_create_data_taxonomy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_taxonomy(request) @@ -1331,26 +1332,28 @@ async def test_create_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.create_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,8 +1680,9 @@ def test_update_data_taxonomy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_taxonomy(request) @@ -1734,26 +1738,28 @@ async def test_update_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.update_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2076,8 +2082,9 @@ def test_delete_data_taxonomy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_taxonomy(request) @@ -2133,26 +2140,28 @@ async def test_delete_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2530,22 +2539,23 @@ async def test_list_data_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.list_data_taxonomies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,22 +3147,23 @@ async def test_get_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.get_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3480,8 +3491,9 @@ def test_create_data_attribute_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_attribute_binding(request) @@ -3537,26 +3549,28 @@ async def test_create_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.create_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3892,8 +3906,9 @@ def test_update_data_attribute_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_attribute_binding(request) @@ -3949,26 +3964,28 @@ async def test_update_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.update_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4300,8 +4317,9 @@ def test_delete_data_attribute_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_attribute_binding(request) @@ -4357,26 +4375,28 @@ async def test_delete_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4755,22 +4775,23 @@ async def test_list_data_attribute_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_attribute_bindings - ] = mock_object + ] = mock_rpc request = {} await client.list_data_attribute_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_attribute_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5365,22 +5386,23 @@ async def test_get_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.get_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5707,8 +5729,9 @@ def test_create_data_attribute_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_attribute(request) @@ -5764,26 +5787,28 @@ async def test_create_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.create_data_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6111,8 +6136,9 @@ def test_update_data_attribute_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_attribute(request) @@ -6168,26 +6194,28 @@ async def test_update_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.update_data_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6511,8 +6539,9 @@ def test_delete_data_attribute_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_attribute(request) @@ -6568,26 +6597,28 @@ async def test_delete_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6965,22 +6996,23 @@ async def test_list_data_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_attributes - ] = mock_object + ] = mock_rpc request = {} await client.list_data_attributes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7574,22 +7606,23 @@ async def test_get_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.get_data_attribute(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index e3183fd2bdec..e632e1dac75b 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -1236,8 +1236,9 @@ def test_create_lake_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_lake(request) @@ -1291,26 +1292,28 @@ async def test_create_lake_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lake - ] = mock_object + ] = mock_rpc request = {} await client.create_lake(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1616,8 +1619,9 @@ def test_update_lake_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_lake(request) @@ -1671,26 +1675,28 @@ async def test_update_lake_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_lake - ] = mock_object + ] = mock_rpc request = {} await client.update_lake(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1990,8 +1996,9 @@ def test_delete_lake_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_lake(request) @@ -2045,26 +2052,28 @@ async def test_delete_lake_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lake - ] = mock_object + ] = mock_rpc request = {} await client.delete_lake(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2417,22 +2426,23 @@ async def test_list_lakes_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lakes - ] = mock_object + ] = mock_rpc request = {} await client.list_lakes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lakes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2990,22 +3000,23 @@ async def test_get_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lake - ] = mock_object + ] = mock_rpc request = {} await client.get_lake(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3372,22 +3383,23 @@ async def test_list_lake_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lake_actions - ] = mock_object + ] = mock_rpc request = {} await client.list_lake_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lake_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3894,8 +3906,9 @@ def test_create_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_zone(request) @@ -3949,26 +3962,28 @@ async def test_create_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_zone - ] = mock_object + ] = mock_rpc request = {} await client.create_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4274,8 +4289,9 @@ def test_update_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
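The synchronous variants in these files (test_create_lake_use_cached_wrapped_rpc and friends) follow the same shape with a plain mock.Mock(), since the cached wrapped method is called rather than awaited. A compact sketch, again with a hypothetical stand-in class:

from unittest import mock


class FakeSyncClient:
    """Hypothetical stand-in for a generated synchronous client."""

    def __init__(self):
        self._transport = mock.Mock()
        self._transport._wrapped_methods = {"create_lake": mock.Mock()}

    def create_lake(self, request):
        return self._transport._wrapped_methods["create_lake"](request)


client = FakeSyncClient()

mock_rpc = mock.Mock()
client._transport._wrapped_methods["create_lake"] = mock_rpc

client.create_lake({})
assert mock_rpc.call_count == 1
client.create_lake({})
assert mock_rpc.call_count == 2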
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_zone(request) @@ -4329,26 +4345,28 @@ async def test_update_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_zone - ] = mock_object + ] = mock_rpc request = {} await client.update_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4648,8 +4666,9 @@ def test_delete_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_zone(request) @@ -4703,26 +4722,28 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_zone - ] = mock_object + ] = mock_rpc request = {} await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5072,22 +5093,23 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_zones - ] = mock_object + ] = mock_rpc request = {} await client.list_zones(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5643,22 +5665,23 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_zone - ] = mock_object + ] = mock_rpc request = {} await client.get_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6025,22 +6048,23 @@ async def test_list_zone_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_zone_actions - ] = mock_object + ] = mock_rpc request = {} await client.list_zone_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_zone_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6547,8 +6571,9 @@ def test_create_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_asset(request) @@ -6602,26 +6627,28 @@ async def test_create_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_asset - ] = mock_object + ] = mock_rpc request = {} await client.create_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6927,8 +6954,9 @@ def test_update_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_asset(request) @@ -6982,26 +7010,28 @@ async def test_update_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_asset - ] = mock_object + ] = mock_rpc request = {} await client.update_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7301,8 +7331,9 @@ def test_delete_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_asset(request) @@ -7356,26 +7387,28 @@ async def test_delete_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_asset - ] = mock_object + ] = mock_rpc request = {} await client.delete_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7727,22 +7760,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8295,22 +8329,23 @@ async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_asset - ] = mock_object + ] = mock_rpc request = {} await client.get_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8677,22 +8712,23 @@ async def test_list_asset_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_asset_actions - ] = mock_object + ] = mock_rpc request = {} await client.list_asset_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_asset_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9199,8 +9235,9 @@ def test_create_task_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_task(request) @@ -9254,26 +9291,28 @@ async def test_create_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_task - ] = mock_object + ] = mock_rpc request = {} await client.create_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9579,8 +9618,9 @@ def test_update_task_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_task(request) @@ -9634,26 +9674,28 @@ async def test_update_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_task - ] = mock_object + ] = mock_rpc request = {} await client.update_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9953,8 +9995,9 @@ def test_delete_task_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_task(request) @@ -10008,26 +10051,28 @@ async def test_delete_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_task - ] = mock_object + ] = mock_rpc request = {} await client.delete_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10380,22 +10425,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
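Several of the hunks here change only a comment: instead of the vaguer "Operation methods build a cached wrapper on first rpc call", the regenerated tests now state that wrapper_fn builds a cached client._transport.operations_client on the first RPC and that later calls reuse it, which is what the wrapper_fn.reset_mock() / call_count == 0 assertions exercise. A rough sketch of that build-once-then-cache behaviour, with FakeTransport and its attributes as placeholders rather than the real transport API:

    from unittest import mock


    class FakeTransport:
        # Illustrative placeholder, not the real GAPIC transport class.

        def __init__(self, wrapper_fn):
            self._wrapper_fn = wrapper_fn
            self._operations_client = None

        @property
        def operations_client(self):
            # Built lazily by wrapper_fn on the first RPC that needs it,
            # then cached for every later call.
            if self._operations_client is None:
                self._operations_client = self._wrapper_fn()
            return self._operations_client


    wrapper_fn = mock.Mock()
    transport = FakeTransport(wrapper_fn)

    transport.operations_client        # first call builds the client
    wrapper_fn.reset_mock()
    transport.operations_client        # second call hits the cache
    assert wrapper_fn.call_count == 0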
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10950,22 +10996,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11318,22 +11365,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11869,22 +11917,23 @@ async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_task - ] = mock_object + ] = mock_rpc request = {} await client.run_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12249,22 +12298,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12614,22 +12664,23 @@ async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12925,8 +12976,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -12982,26 +13034,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13327,8 +13381,9 @@ def test_update_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_environment(request) @@ -13384,26 +13439,28 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13723,8 +13780,9 @@ def test_delete_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_environment(request) @@ -13780,26 +13838,28 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14171,22 +14231,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14759,22 +14820,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15131,22 +15193,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index 51d511261536..106f11876d3d 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -1311,22 +1311,23 @@ async def test_create_entity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entity - ] = mock_object + ] = mock_rpc request = {} await client.create_entity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1732,22 +1733,23 @@ async def test_update_entity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entity - ] = mock_object + ] = mock_rpc request = {} await client.update_entity(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2029,22 +2031,23 @@ async def test_delete_entity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entity - ] = mock_object + ] = mock_rpc request = {} await client.delete_entity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2415,22 +2418,23 @@ async def test_get_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entity - ] = mock_object + ] = mock_rpc request = {} await client.get_entity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2801,22 +2805,23 @@ async def test_list_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entities - ] = mock_object + ] = mock_rpc request = {} await client.list_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3370,22 +3375,23 @@ async def test_create_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_partition - ] = mock_object + ] = mock_rpc request = {} await client.create_partition(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3743,22 +3749,23 @@ async def test_delete_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_partition - ] = mock_object + ] = mock_rpc request = {} await client.delete_partition(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4107,22 +4114,23 @@ async def test_get_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partition - ] = mock_object + ] = mock_rpc request = {} await client.get_partition(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4477,22 +4485,23 @@ async def test_list_partitions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_partitions - ] = mock_object + ] = mock_rpc request = {} await client.list_partitions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_partitions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py index ae7678fa7bd3..a12bacb0b074 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -229,9 +228,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreClient).get_transport_class, type(DataprocMetastoreClient) - ) + get_transport_class = DataprocMetastoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py index eb6ce944ff32..7eb04c02c9e4 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py @@ -786,7 +786,7 @@ def __init__( Type[DataprocMetastoreTransport], Callable[..., DataprocMetastoreTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataprocMetastoreTransport], transport) ) diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py index 340282ce0d72..79b1bdd20da3 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,10 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreFederationClient).get_transport_class, - type(DataprocMetastoreFederationClient), - ) + get_transport_class = DataprocMetastoreFederationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py index 02d6cb0e3c2a..b151707de647 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py @@ -700,7 +700,7 @@ def __init__( Type[DataprocMetastoreFederationTransport], Callable[..., DataprocMetastoreFederationTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreFederationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., DataprocMetastoreFederationTransport], transport diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py index b53984a832c0..5356db6dd979 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,9 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreClient).get_transport_class, type(DataprocMetastoreClient) - ) + get_transport_class = DataprocMetastoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py index f01596090d88..41e907b0f7fc 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py @@ -808,7 +808,7 @@ def __init__( Type[DataprocMetastoreTransport], Callable[..., DataprocMetastoreTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataprocMetastoreTransport], transport) ) diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py index 58c17e4443f2..f85aaaf1c945 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,10 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreFederationClient).get_transport_class, - type(DataprocMetastoreFederationClient), - ) + get_transport_class = DataprocMetastoreFederationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py index d5578b690d4c..d0480bac5498 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py @@ -700,7 +700,7 @@ def __init__( Type[DataprocMetastoreFederationTransport], Callable[..., DataprocMetastoreFederationTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreFederationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., DataprocMetastoreFederationTransport], transport diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py index dd545bdcd7c9..ced7f1c9e60c 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
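The dataproc-metastore client diffs in this stretch repeatedly swap functools.partial(type(SomeClient).get_transport_class, type(SomeClient)) for a plain reference to SomeClient.get_transport_class (and, in the synchronous client.py files, type(self).get_transport_class(transport) for SomeClient.get_transport_class(transport)). In the generated clients, get_transport_class is defined on the client's metaclass and essentially just looks up a transport class in a registry keyed by the requested name, so both spellings resolve the same transport; the direct reference is simply shorter. A toy reproduction under those assumptions, where ClientMeta, Client, and the registry contents are stand-ins:

    import functools


    class ClientMeta(type):
        # Stand-in for the generated client metaclass.
        _transport_registry = {
            "grpc": "GrpcTransport",
            "grpc_asyncio": "GrpcAsyncIOTransport",
        }

        def get_transport_class(cls, label=None):
            # Only the label is consulted, so how `cls` ends up bound does
            # not change the result.
            if label:
                return cls._transport_registry[label]
            return next(iter(cls._transport_registry.values()))


    class Client(metaclass=ClientMeta):
        pass


    # Form removed by the patch: bind the metaclass explicitly via partial.
    old_style = functools.partial(type(Client).get_transport_class, type(Client))
    # Form introduced by the patch: plain attribute access on the client class.
    new_style = Client.get_transport_class

    assert old_style("grpc_asyncio") == new_style("grpc_asyncio") == "GrpcAsyncIOTransport"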
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,9 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreClient).get_transport_class, type(DataprocMetastoreClient) - ) + get_transport_class = DataprocMetastoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py index fc54fbed5b30..6d2fa8cc4c22 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py @@ -808,7 +808,7 @@ def __init__( Type[DataprocMetastoreTransport], Callable[..., DataprocMetastoreTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataprocMetastoreTransport], transport) ) diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py index 4414c1826a03..0f134074ee41 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,10 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreFederationClient).get_transport_class, - type(DataprocMetastoreFederationClient), - ) + get_transport_class = DataprocMetastoreFederationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py index 658525e3d174..9e7055835472 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py @@ -700,7 +700,7 @@ def __init__( Type[DataprocMetastoreFederationTransport], Callable[..., DataprocMetastoreFederationTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreFederationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., DataprocMetastoreFederationTransport], transport diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json index ba41e49e516c..61145e9be3f7 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json index 4422b864e380..96abac734738 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json index 26abda4ab6df..4a03aa222378 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py 
b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index f3cd66bfca65..13e3222324cc 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -1347,22 +1347,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1937,22 +1938,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2265,8 +2267,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -2320,26 +2323,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2669,8 +2674,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2724,26 +2730,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -3120,26 +3129,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3507,22 +3518,23 @@ async def test_list_metadata_imports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metadata_imports - ] = mock_object + ] = mock_rpc request = {} await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metadata_imports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4103,22 +4115,23 @@ async def test_get_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4440,8 +4453,9 @@ def test_create_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_metadata_import(request) @@ -4497,26 +4511,28 @@ async def test_create_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4871,8 +4887,9 @@ def test_update_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_metadata_import(request) @@ -4928,26 +4945,28 @@ async def test_update_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5285,8 +5304,9 @@ def test_export_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_metadata(request) @@ -5340,26 +5360,28 @@ async def test_export_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_metadata - ] = mock_object + ] = mock_rpc request = {} await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5571,8 +5593,9 @@ def test_restore_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_service(request) @@ -5626,26 +5649,28 @@ async def test_restore_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_service - ] = mock_object + ] = mock_rpc request = {} await client.restore_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6010,22 +6035,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6577,22 +6603,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6918,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -6946,26 +6974,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7277,8 +7307,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -7332,26 +7363,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7643,8 +7676,9 @@ def test_query_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.query_metadata(request) @@ -7698,26 +7732,28 @@ async def test_query_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_metadata - ] = mock_object + ] = mock_rpc request = {} await client.query_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.query_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7942,8 +7978,9 @@ def test_move_table_to_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_table_to_database(request) @@ -7999,26 +8036,28 @@ async def test_move_table_to_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_table_to_database - ] = mock_object + ] = mock_rpc request = {} await client.move_table_to_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_table_to_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8247,8 +8286,9 @@ def test_alter_metadata_resource_location_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.alter_metadata_resource_location(request) @@ -8304,26 +8344,28 @@ async def test_alter_metadata_resource_location_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.alter_metadata_resource_location - ] = mock_object + ] = mock_rpc request = {} await client.alter_metadata_resource_location(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.alter_metadata_resource_location(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py index e145c749331e..e8df9b41018d 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py @@ -1397,22 +1397,23 @@ async def test_list_federations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_federations - ] = mock_object + ] = mock_rpc request = {} await client.list_federations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_federations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1974,23 @@ async def test_get_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_federation - ] = mock_object + ] = mock_rpc request = {} await client.get_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2304,8 +2306,9 @@ def test_create_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_federation(request) @@ -2361,26 +2364,28 @@ async def test_create_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_federation - ] = mock_object + ] = mock_rpc request = {} await client.create_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2714,9 @@ def test_update_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_federation(request) @@ -2766,26 +2772,28 @@ async def test_update_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_federation - ] = mock_object + ] = mock_rpc request = {} await client.update_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3106,8 +3114,9 @@ def test_delete_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_federation(request) @@ -3163,26 +3172,28 @@ async def test_delete_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_federation - ] = mock_object + ] = mock_rpc request = {} await client.delete_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index be043d9b6a68..0acc7b881125 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -1347,22 +1347,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1937,22 +1938,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2265,8 +2267,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -2320,26 +2323,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2669,8 +2674,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2724,26 +2730,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -3120,26 +3129,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3507,22 +3518,23 @@ async def test_list_metadata_imports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metadata_imports - ] = mock_object + ] = mock_rpc request = {} await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metadata_imports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4103,22 +4115,23 @@ async def test_get_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4440,8 +4453,9 @@ def test_create_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_metadata_import(request) @@ -4497,26 +4511,28 @@ async def test_create_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4871,8 +4887,9 @@ def test_update_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_metadata_import(request) @@ -4928,26 +4945,28 @@ async def test_update_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5285,8 +5304,9 @@ def test_export_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_metadata(request) @@ -5340,26 +5360,28 @@ async def test_export_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_metadata - ] = mock_object + ] = mock_rpc request = {} await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5571,8 +5593,9 @@ def test_restore_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_service(request) @@ -5626,26 +5649,28 @@ async def test_restore_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_service - ] = mock_object + ] = mock_rpc request = {} await client.restore_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6010,22 +6035,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6577,22 +6603,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6918,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -6946,26 +6974,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7277,8 +7307,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -7332,26 +7363,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7707,22 +7740,23 @@ async def test_remove_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.remove_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7941,8 +7975,9 @@ def test_query_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.query_metadata(request) @@ -7996,26 +8031,28 @@ async def test_query_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_metadata - ] = mock_object + ] = mock_rpc request = {} await client.query_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.query_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8240,8 +8277,9 @@ def test_move_table_to_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_table_to_database(request) @@ -8297,26 +8335,28 @@ async def test_move_table_to_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_table_to_database - ] = mock_object + ] = mock_rpc request = {} await client.move_table_to_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_table_to_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8545,8 +8585,9 @@ def test_alter_metadata_resource_location_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.alter_metadata_resource_location(request) @@ -8602,26 +8643,28 @@ async def test_alter_metadata_resource_location_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.alter_metadata_resource_location - ] = mock_object + ] = mock_rpc request = {} await client.alter_metadata_resource_location(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.alter_metadata_resource_location(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py index 3730337bda5a..b2d58f70becc 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py @@ -1397,22 +1397,23 @@ async def test_list_federations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_federations - ] = mock_object + ] = mock_rpc request = {} await client.list_federations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_federations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1974,23 @@ async def test_get_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_federation - ] = mock_object + ] = mock_rpc request = {} await client.get_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2304,8 +2306,9 @@ def test_create_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_federation(request) @@ -2361,26 +2364,28 @@ async def test_create_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_federation - ] = mock_object + ] = mock_rpc request = {} await client.create_federation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2714,9 @@ def test_update_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_federation(request) @@ -2766,26 +2772,28 @@ async def test_update_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_federation - ] = mock_object + ] = mock_rpc request = {} await client.update_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3106,8 +3114,9 @@ def test_delete_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_federation(request) @@ -3163,26 +3172,28 @@ async def test_delete_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_federation - ] = mock_object + ] = mock_rpc request = {} await client.delete_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index 8255f4fa435b..beeb99a2bded 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -1347,22 +1347,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1937,22 +1938,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2265,8 +2267,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -2320,26 +2323,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2669,8 +2674,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2724,26 +2730,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -3120,26 +3129,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3507,22 +3518,23 @@ async def test_list_metadata_imports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metadata_imports - ] = mock_object + ] = mock_rpc request = {} await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metadata_imports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4103,22 +4115,23 @@ async def test_get_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4440,8 +4453,9 @@ def test_create_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_metadata_import(request) @@ -4497,26 +4511,28 @@ async def test_create_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4871,8 +4887,9 @@ def test_update_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_metadata_import(request) @@ -4928,26 +4945,28 @@ async def test_update_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5285,8 +5304,9 @@ def test_export_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_metadata(request) @@ -5340,26 +5360,28 @@ async def test_export_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_metadata - ] = mock_object + ] = mock_rpc request = {} await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5571,8 +5593,9 @@ def test_restore_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_service(request) @@ -5626,26 +5649,28 @@ async def test_restore_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_service - ] = mock_object + ] = mock_rpc request = {} await client.restore_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6010,22 +6035,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6577,22 +6603,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6918,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -6946,26 +6974,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7277,8 +7307,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -7332,26 +7363,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7707,22 +7740,23 @@ async def test_remove_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.remove_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7941,8 +7975,9 @@ def test_query_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.query_metadata(request) @@ -7996,26 +8031,28 @@ async def test_query_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_metadata - ] = mock_object + ] = mock_rpc request = {} await client.query_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.query_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8240,8 +8277,9 @@ def test_move_table_to_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_table_to_database(request) @@ -8297,26 +8335,28 @@ async def test_move_table_to_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_table_to_database - ] = mock_object + ] = mock_rpc request = {} await client.move_table_to_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_table_to_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8545,8 +8585,9 @@ def test_alter_metadata_resource_location_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.alter_metadata_resource_location(request) @@ -8602,26 +8643,28 @@ async def test_alter_metadata_resource_location_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.alter_metadata_resource_location - ] = mock_object + ] = mock_rpc request = {} await client.alter_metadata_resource_location(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.alter_metadata_resource_location(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py index 610fce266a7c..cf6f75708fe7 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py @@ -1397,22 +1397,23 @@ async def test_list_federations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_federations - ] = mock_object + ] = mock_rpc request = {} await client.list_federations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_federations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1974,23 @@ async def test_get_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_federation - ] = mock_object + ] = mock_rpc request = {} await client.get_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2304,8 +2306,9 @@ def test_create_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_federation(request) @@ -2361,26 +2364,28 @@ async def test_create_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_federation - ] = mock_object + ] = mock_rpc request = {} await client.create_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2714,9 @@ def test_update_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_federation(request) @@ -2766,26 +2772,28 @@ async def test_update_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_federation - ] = mock_object + ] = mock_rpc request = {} await client.update_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3106,8 +3114,9 @@ def test_delete_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_federation(request) @@ -3163,26 +3172,28 @@ async def test_delete_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_federation - ] = mock_object + ] = mock_rpc request = {} await client.delete_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index a4bac342a507..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.10.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index a4bac342a507..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.10.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py index bb5d8a6db4f6..ce06987dec0c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoscalingPolicyServiceClient).get_transport_class, - type(AutoscalingPolicyServiceClient), - ) + get_transport_class = AutoscalingPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py index a92bd0cb9c27..6c4f2a92231d 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -675,7 +675,7 @@ def __init__( Type[AutoscalingPolicyServiceTransport], Callable[..., AutoscalingPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + AutoscalingPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoscalingPolicyServiceTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py index 0f99aab34c62..8f1648476acb 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BatchControllerClient).get_transport_class, type(BatchControllerClient) - ) + get_transport_class = BatchControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py index 00817e503eb4..f9fab06463aa 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[BatchControllerTransport], Callable[..., BatchControllerTransport] ] = ( - type(self).get_transport_class(transport) + BatchControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BatchControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index 38f1c3605bc6..b3c00033b6a1 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClusterControllerClient).get_transport_class, type(ClusterControllerClient) - ) + get_transport_class = ClusterControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py index 64cab2519f3d..d46589e86a36 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -721,7 +721,7 @@ def __init__( Type[ClusterControllerTransport], Callable[..., ClusterControllerTransport], ] = ( - type(self).get_transport_class(transport) + ClusterControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClusterControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py index 92ced819a375..c028b8119da6 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(JobControllerClient).get_transport_class, type(JobControllerClient) - ) + get_transport_class = JobControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py index 7b09c15712e3..2c846b3d7a6f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[JobControllerTransport], Callable[..., JobControllerTransport] ] = ( - type(self).get_transport_class(transport) + JobControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., JobControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py index 32f019859704..9827ac5afdc0 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,10 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NodeGroupControllerClient).get_transport_class, - type(NodeGroupControllerClient), - ) + get_transport_class = NodeGroupControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py index 60acc99b6ea5..74b161e83af4 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py @@ -674,7 +674,7 @@ def __init__( Type[NodeGroupControllerTransport], Callable[..., NodeGroupControllerTransport], ] = ( - type(self).get_transport_class(transport) + NodeGroupControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeGroupControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py index 3b2db0851dbd..005fd68b3c5e 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionControllerClient).get_transport_class, type(SessionControllerClient) - ) + get_transport_class = SessionControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py index 4ac9945fc27b..2738c51bb43f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py @@ -718,7 +718,7 @@ def __init__( Type[SessionControllerTransport], Callable[..., SessionControllerTransport], ] = ( - type(self).get_transport_class(transport) + SessionControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py index 01298bd7c2e8..4419ecc2b8de 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -210,10 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionTemplateControllerClient).get_transport_class, - type(SessionTemplateControllerClient), - ) + get_transport_class = SessionTemplateControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py index e22e2173c4b2..442118b1fef8 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py @@ -698,7 +698,7 @@ def __init__( Type[SessionTemplateControllerTransport], Callable[..., SessionTemplateControllerTransport], ] = ( - type(self).get_transport_class(transport) + SessionTemplateControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionTemplateControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index 1a59261f4b97..85222311c4d7 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -14,7 +14,6 @@ # limitations 
under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,10 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WorkflowTemplateServiceClient).get_transport_class, - type(WorkflowTemplateServiceClient), - ) + get_transport_class = WorkflowTemplateServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py index f99de568891a..4da14dd2c32c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -726,7 +726,7 @@ def __init__( Type[WorkflowTemplateServiceTransport], Callable[..., WorkflowTemplateServiceTransport], ] = ( - type(self).get_transport_class(transport) + WorkflowTemplateServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WorkflowTemplateServiceTransport], transport) ) diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index f461de26e252..c5f4e003db04 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.10.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index dcc74698fce3..9507d196b9a5 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -1380,22 +1380,23 @@ async def test_create_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1779,22 +1780,23 @@ async def test_update_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2172,22 +2174,23 @@ async def test_get_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2564,22 +2567,23 @@ async def test_list_autoscaling_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_autoscaling_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_autoscaling_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3153,22 +3157,23 @@ async def test_delete_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index e8c71d15897a..4e92cbcfc4a1 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -1268,8 +1268,9 @@ def test_create_batch_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_batch(request) @@ -1323,26 +1324,28 @@ async def test_create_batch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_batch - ] = mock_object + ] = mock_rpc request = {} await client.create_batch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_batch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1721,22 +1724,23 @@ async def test_get_batch_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_batch - ] = mock_object + ] = mock_rpc request = {} await client.get_batch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_batch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2097,22 +2101,23 @@ async def test_list_batches_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_batches - ] = mock_object + ] = mock_rpc request = {} await client.list_batches(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_batches(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2648,22 +2653,23 @@ async def test_delete_batch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_batch - ] = mock_object + ] = mock_rpc request = {} await client.delete_batch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_batch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 238b6364103c..099921eb7e2b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -1285,8 +1285,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -1340,26 +1341,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,8 +1680,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -1732,26 +1736,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2093,8 +2099,9 @@ def test_stop_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_cluster(request) @@ -2148,26 +2155,28 @@ async def test_stop_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_cluster - ] = mock_object + ] = mock_rpc request = {} await client.stop_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2387,8 +2396,9 @@ def test_start_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_cluster(request) @@ -2442,26 +2452,28 @@ async def test_start_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_cluster - ] = mock_object + ] = mock_rpc request = {} await client.start_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2681,8 +2693,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -2736,26 +2749,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3135,22 +3150,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3529,22 +3545,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4066,8 +4083,9 @@ def test_diagnose_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.diagnose_cluster(request) @@ -4121,26 +4139,28 @@ async def test_diagnose_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_cluster - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.diagnose_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index e59a1ed6a1ab..1d12641b7a95 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -1303,22 +1303,23 @@ async def test_submit_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_job - ] = mock_object + ] = mock_rpc request = {} await client.submit_job(request) # Establish that the underlying gRPC stub method was called. 
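# Illustrative sketch, not part of the patch: the async test pattern these
# hunks converge on, using hypothetical stand-in objects for the client
# transport. An AsyncMock whose return_value is a plain Mock() replaces the
# cached wrapped method, so awaiting it yields a concrete object rather than
# another coroutine, and call counts can be asserted across repeated requests.
# wrapper_fn stands in for the factory that would build a new wrapper.
import asyncio
from unittest import mock


async def exercise_cached_wrapped_rpc():
    transport = mock.Mock()              # stand-in for client._client._transport
    transport._wrapped_methods = {}
    wrapper_fn = mock.Mock()             # stand-in for the wrapper factory

    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()  # concrete result, not a nested coroutine
    transport._wrapped_methods["delete_batch"] = mock_rpc

    request = {}
    await transport._wrapped_methods["delete_batch"](request)
    assert mock_rpc.call_count == 1

    wrapper_fn.reset_mock()
    await transport._wrapped_methods["delete_batch"](request)

    # The cached wrapper is reused: no new wrapper was built, the rpc ran twice.
    assert wrapper_fn.call_count == 0
    assert mock_rpc.call_count == 2


asyncio.run(exercise_cached_wrapped_rpc())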
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.submit_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1650,8 +1651,9 @@ def test_submit_job_as_operation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.submit_job_as_operation(request) @@ -1707,26 +1709,28 @@ async def test_submit_job_as_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_job_as_operation - ] = mock_object + ] = mock_rpc request = {} await client.submit_job_as_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.submit_job_as_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2115,22 +2119,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2511,22 +2516,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3107,22 +3113,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3408,22 +3415,23 @@ async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3793,22 +3801,23 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py index d873909055a0..1273aa1bde2b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py @@ -1318,8 +1318,9 @@ def test_create_node_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_node_group(request) @@ -1375,26 +1376,28 @@ async def test_create_node_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node_group - ] = mock_object + ] = mock_rpc request = {} await client.create_node_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_node_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1724,8 +1727,9 @@ def test_resize_node_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resize_node_group(request) @@ -1781,26 +1785,28 @@ async def test_resize_node_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resize_node_group - ] = mock_object + ] = mock_rpc request = {} await client.resize_node_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resize_node_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2169,22 +2175,23 @@ async def test_get_node_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_group - ] = mock_object + ] = mock_rpc request = {} await client.get_node_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py index e9ff1c5a0ec5..f5dbfb565af8 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py @@ -1282,8 +1282,9 @@ def test_create_session_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_session(request) @@ -1337,26 +1338,28 @@ async def test_create_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session - ] = mock_object + ] = mock_rpc request = {} await client.create_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1740,22 +1743,23 @@ async def test_get_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session - ] = mock_object + ] = mock_rpc request = {} await client.get_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2116,22 +2120,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2628,8 +2633,9 @@ def test_terminate_session_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.terminate_session(request) @@ -2685,26 +2691,28 @@ async def test_terminate_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.terminate_session - ] = mock_object + ] = mock_rpc request = {} await client.terminate_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.terminate_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3006,8 +3014,9 @@ def test_delete_session_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_session(request) @@ -3061,26 +3070,28 @@ async def test_delete_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session - ] = mock_object + ] = mock_rpc request = {} await client.delete_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py index 3855f4b6b8be..b157306093fc 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py @@ -1393,22 +1393,23 @@ async def test_create_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session_template - ] = mock_object + ] = mock_rpc request = {} await client.create_session_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1802,22 +1803,23 @@ async def test_update_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session_template - ] = mock_object + ] = mock_rpc request = {} await client.update_session_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2204,22 +2206,23 @@ async def test_get_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session_template - ] = mock_object + ] = mock_rpc request = {} await client.get_session_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2602,22 +2605,23 @@ async def test_list_session_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_session_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_session_templates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_session_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3185,22 +3189,23 @@ async def test_delete_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_session_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 952acdc9e41e..67bc73b8e99b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -1388,22 +1388,23 @@ async def test_create_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1796,22 +1797,23 @@ async def test_get_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2132,8 +2134,9 @@ def test_instantiate_workflow_template_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.instantiate_workflow_template(request) @@ -2189,26 +2192,28 @@ async def test_instantiate_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.instantiate_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.instantiate_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2532,8 +2537,9 @@ def test_instantiate_inline_workflow_template_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.instantiate_inline_workflow_template(request) @@ -2589,26 +2595,28 @@ async def test_instantiate_inline_workflow_template_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.instantiate_inline_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.instantiate_inline_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2990,22 +2998,23 @@ async def test_update_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3384,22 +3393,23 @@ async def test_list_workflow_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflow_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflow_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3969,22 +3979,23 @@ async def test_delete_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py index 07458d75789d..91b665e49d21 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DatastreamClient).get_transport_class, type(DatastreamClient) - ) + get_transport_class = DatastreamClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py index a4bc48a00274..d4f79a05af7f 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py @@ -776,7 +776,7 @@ def __init__( transport_init: Union[ Type[DatastreamTransport], Callable[..., DatastreamTransport] ] = ( - type(self).get_transport_class(transport) + DatastreamClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DatastreamTransport], transport) ) diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py index b45cf101cf25..c71f11f0c58c 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DatastreamClient).get_transport_class, type(DatastreamClient) - ) + get_transport_class = DatastreamClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py index 361e95536a5e..ce57716e9865 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py @@ -729,7 +729,7 @@ def __init__( transport_init: Union[ Type[DatastreamTransport], Callable[..., DatastreamTransport] ] = ( - type(self).get_transport_class(transport) + DatastreamClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DatastreamTransport], transport) ) diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json index 8cde4a7487bd..8a69b9040e43 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json index 00ca209534e3..09dea00ec932 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py index 80899677eda3..88fccb36bcff 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py @@ -1280,22 +1280,23 @@ async def test_list_connection_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connection_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_connection_profiles(request) # Establish that the underlying gRPC stub method was called. 
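# Illustrative sketch, not part of the patch: a simplified model (hypothetical
# names) of why `get_transport_class = DatastreamClient.get_transport_class`
# can replace the removed functools.partial(...) form. In the generated
# clients the method lives on the client's metaclass, so the plain attribute
# reference is already bound to the class and the partial that re-passed the
# class was redundant.
import functools
from collections import OrderedDict


class _ClientMeta(type):
    _transport_registry = OrderedDict(grpc="GrpcTransport", rest="RestTransport")

    def get_transport_class(cls, label=None):
        # Return the registered transport for `label`, defaulting to the first.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class SyncClient(metaclass=_ClientMeta):
    """Stand-in for the synchronous client class."""


class AsyncClient:
    """Stand-in for the async client class."""

    # New form used by the patch: reuse the already-bound method.
    get_transport_class = SyncClient.get_transport_class

    # Old form it replaces; equivalent here, but needlessly indirect.
    _old_get_transport_class = functools.partial(
        type(SyncClient).get_transport_class, type(SyncClient)
    )


assert AsyncClient.get_transport_class("rest") == "RestTransport"
assert AsyncClient._old_get_transport_class("rest") == "RestTransport"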
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connection_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1881,22 +1882,23 @@ async def test_get_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2216,8 +2218,9 @@ def test_create_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection_profile(request) @@ -2273,26 +2276,28 @@ async def test_create_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.create_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2632,8 +2637,9 @@ def test_update_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection_profile(request) @@ -2689,26 +2695,28 @@ async def test_update_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.update_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3040,8 +3048,9 @@ def test_delete_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection_profile(request) @@ -3097,26 +3106,28 @@ async def test_delete_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3483,22 +3494,23 @@ async def test_discover_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discover_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.discover_connection_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discover_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3778,22 +3790,23 @@ async def test_list_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4348,22 +4361,23 @@ async def test_get_stream_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4669,8 +4683,9 @@ def test_create_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_stream(request) @@ -4724,26 +4739,28 @@ async def test_create_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5053,8 +5070,9 @@ def test_update_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_stream(request) @@ -5108,26 +5126,28 @@ async def test_update_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5429,8 +5449,9 @@ def test_delete_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_stream(request) @@ -5484,26 +5505,28 @@ async def test_delete_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5862,22 +5885,23 @@ async def test_get_stream_object_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream_object - ] = mock_object + ] = mock_rpc request = {} await client.get_stream_object(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream_object(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6253,22 +6277,23 @@ async def test_lookup_stream_object_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_stream_object - ] = mock_object + ] = mock_rpc request = {} await client.lookup_stream_object(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_stream_object(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6557,22 +6582,23 @@ async def test_list_stream_objects_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_stream_objects - ] = mock_object + ] = mock_rpc request = {} await client.list_stream_objects(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_stream_objects(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7140,22 +7166,23 @@ async def test_start_backfill_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_backfill_job - ] = mock_object + ] = mock_rpc request = {} await client.start_backfill_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_backfill_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7516,22 +7543,23 @@ async def test_stop_backfill_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_backfill_job - ] = mock_object + ] = mock_rpc request = {} await client.stop_backfill_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_backfill_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7894,22 +7922,23 @@ async def test_fetch_static_ips_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_static_ips - ] = mock_object + ] = mock_rpc request = {} await client.fetch_static_ips(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_static_ips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8413,8 +8442,9 @@ def test_create_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_private_connection(request) @@ -8470,26 +8500,28 @@ async def test_create_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8893,22 +8925,23 @@ async def test_get_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_private_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9293,22 +9326,23 @@ async def test_list_private_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_private_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9835,8 +9869,9 @@ def test_delete_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_private_connection(request) @@ -9892,26 +9927,28 @@ async def test_delete_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10216,8 +10253,9 @@ def test_create_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_route(request) @@ -10271,26 +10309,28 @@ async def test_create_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_route - ] = mock_object + ] = mock_rpc request = {} await client.create_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10663,22 +10703,23 @@ async def test_get_route_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_route - ] = mock_object + ] = mock_rpc request = {} await client.get_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11042,22 +11083,23 @@ async def test_list_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11548,8 +11590,9 @@ def test_delete_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_route(request) @@ -11603,26 +11646,28 @@ async def test_delete_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py index 1f6089fc5fd6..9626e246f848 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py @@ -1276,22 +1276,23 @@ async def test_list_connection_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connection_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_connection_profiles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connection_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1877,22 +1878,23 @@ async def test_get_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2212,8 +2214,9 @@ def test_create_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection_profile(request) @@ -2269,26 +2272,28 @@ async def test_create_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.create_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2628,8 +2633,9 @@ def test_update_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection_profile(request) @@ -2685,26 +2691,28 @@ async def test_update_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.update_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3036,8 +3044,9 @@ def test_delete_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection_profile(request) @@ -3093,26 +3102,28 @@ async def test_delete_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3479,22 +3490,23 @@ async def test_discover_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discover_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.discover_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discover_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3774,22 +3786,23 @@ async def test_list_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4338,22 +4351,23 @@ async def test_get_stream_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_stream(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4654,8 +4668,9 @@ def test_create_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_stream(request) @@ -4709,26 +4724,28 @@ async def test_create_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5038,8 +5055,9 @@ def test_update_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_stream(request) @@ -5093,26 +5111,28 @@ async def test_update_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5414,8 +5434,9 @@ def test_delete_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_stream(request) @@ -5469,26 +5490,28 @@ async def test_delete_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5778,8 +5801,9 @@ def test_fetch_errors_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.fetch_errors(request) @@ -5833,26 +5857,28 @@ async def test_fetch_errors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_errors - ] = mock_object + ] = mock_rpc request = {} await client.fetch_errors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.fetch_errors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6123,22 +6149,23 @@ async def test_fetch_static_ips_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_static_ips - ] = mock_object + ] = mock_rpc request = {} await client.fetch_static_ips(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_static_ips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6642,8 +6669,9 @@ def test_create_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_private_connection(request) @@ -6699,26 +6727,28 @@ async def test_create_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7122,22 +7152,23 @@ async def test_get_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_private_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7522,22 +7553,23 @@ async def test_list_private_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_private_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8064,8 +8096,9 @@ def test_delete_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_private_connection(request) @@ -8121,26 +8154,28 @@ async def test_delete_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8445,8 +8480,9 @@ def test_create_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_route(request) @@ -8500,26 +8536,28 @@ async def test_create_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_route - ] = mock_object + ] = mock_rpc request = {} await client.create_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8892,22 +8930,23 @@ async def test_get_route_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_route - ] = mock_object + ] = mock_rpc request = {} await client.get_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9271,22 +9310,23 @@ async def test_list_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9777,8 +9817,9 @@ def test_delete_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_route(request) @@ -9832,26 +9873,28 @@ async def test_delete_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index 7a056f2fd9ee..558c8aab67c5 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.0.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index 7a056f2fd9ee..558c8aab67c5 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.0.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py index 87618574717d..72c3f7a4098b 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -232,9 +231,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudDeployClient).get_transport_class, type(CloudDeployClient) - ) + get_transport_class = CloudDeployClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index 55a5228129f9..af6eb35e86ed 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -1017,7 +1017,7 @@ def __init__( transport_init: Union[ Type[CloudDeployTransport], Callable[..., CloudDeployTransport] ] = ( - type(self).get_transport_class(transport) + CloudDeployClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudDeployTransport], transport) ) diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index 05afdfc940e5..c7d9e95ecf4f 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -2398,7 +2398,7 @@ class SkaffoldGCSSource(proto.Message): Attributes: source (str): Required. Cloud Storage source paths to copy recursively. - For example, providing `gs://my-bucket/dir/configs/*` will + For example, providing "gs://my-bucket/dir/configs/*" will result in Skaffold copying all files within the "dir/configs" directory in the bucket "my-bucket". path (str): diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index f750b853f039..b4f5eeee1a80 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "2.0.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index 95200390561d..ff22c1a2c000 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -1284,22 +1284,23 @@ async def test_list_delivery_pipelines_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_delivery_pipelines - ] = mock_object + ] = mock_rpc request = {} await client.list_delivery_pipelines(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_delivery_pipelines(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1888,22 +1889,23 @@ async def test_get_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.get_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2230,8 +2232,9 @@ def test_create_delivery_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_delivery_pipeline(request) @@ -2287,26 +2290,28 @@ async def test_create_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.create_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2638,8 +2643,9 @@ def test_update_delivery_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_delivery_pipeline(request) @@ -2695,26 +2701,28 @@ async def test_update_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.update_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3040,8 +3048,9 @@ def test_delete_delivery_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_delivery_pipeline(request) @@ -3097,26 +3106,28 @@ async def test_delete_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3482,22 +3493,23 @@ async def test_list_targets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_targets - ] = mock_object + ] = mock_rpc request = {} await client.list_targets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4045,22 +4057,23 @@ async def test_rollback_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_target - ] = mock_object + ] = mock_rpc request = {} await client.rollback_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4439,22 +4452,23 @@ async def test_get_target_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_target - ] = mock_object + ] = mock_rpc request = {} await client.get_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4757,8 +4771,9 @@ def test_create_target_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_target(request) @@ -4812,26 +4827,28 @@ async def test_create_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_target - ] = mock_object + ] = mock_rpc request = {} await client.create_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5141,8 +5158,9 @@ def test_update_target_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_target(request) @@ -5196,26 +5214,28 @@ async def test_update_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_target - ] = mock_object + ] = mock_rpc request = {} await client.update_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5519,8 +5539,9 @@ def test_delete_target_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_target(request) @@ -5574,26 +5595,28 @@ async def test_delete_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_target - ] = mock_object + ] = mock_rpc request = {} await client.delete_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5961,22 +5984,23 @@ async def test_list_custom_target_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_target_types - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_target_types(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_target_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6567,22 +6591,23 @@ async def test_get_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6909,8 +6934,9 @@ def test_create_custom_target_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_custom_target_type(request) @@ -6966,26 +6992,28 @@ async def test_create_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7317,8 +7345,9 @@ def test_update_custom_target_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_custom_target_type(request) @@ -7374,26 +7403,28 @@ async def test_update_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7719,8 +7750,9 @@ def test_delete_custom_target_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_custom_target_type(request) @@ -7776,26 +7808,28 @@ async def test_delete_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8161,22 +8195,23 @@ async def test_list_releases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_releases - ] = mock_object + ] = mock_rpc request = {} await client.list_releases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_releases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8745,22 +8780,23 @@ async def test_get_release_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_release - ] = mock_object + ] = mock_rpc request = {} await client.get_release(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9073,8 +9109,9 @@ def test_create_release_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_release(request) @@ -9128,26 +9165,28 @@ async def test_create_release_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_release - ] = mock_object + ] = mock_rpc request = {} await client.create_release(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9508,22 +9547,23 @@ async def test_abandon_release_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.abandon_release - ] = mock_object + ] = mock_rpc request = {} await client.abandon_release(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.abandon_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9864,22 +9904,23 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_rollout - ] = mock_object + ] = mock_rpc request = {} await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10222,22 +10263,23 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.advance_rollout - ] = mock_object + ] = mock_rpc request = {} await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10588,22 +10630,23 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_rollout - ] = mock_object + ] = mock_rpc request = {} await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10958,22 +11001,23 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_rollouts - ] = mock_object + ] = mock_rpc request = {} await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11557,22 +11601,23 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_rollout - ] = mock_object + ] = mock_rpc request = {} await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11898,8 +11943,9 @@ def test_create_rollout_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_rollout(request) @@ -11953,26 +11999,28 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_rollout - ] = mock_object + ] = mock_rpc request = {} await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12335,22 +12383,23 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.ignore_job - ] = mock_object + ] = mock_rpc request = {} await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12713,22 +12762,23 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retry_job - ] = mock_object + ] = mock_rpc request = {} await client.retry_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13103,22 +13153,23 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_job_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13678,22 +13729,23 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job_run - ] = mock_object + ] = mock_rpc request = {} await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14053,22 +14105,23 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.terminate_job_run - ] = mock_object + ] = mock_rpc request = {} await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14425,22 +14478,23 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_config - ] = mock_object + ] = mock_rpc request = {} await client.get_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14743,8 +14797,9 @@ def test_create_automation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_automation(request) @@ -14800,26 +14855,28 @@ async def test_create_automation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_automation - ] = mock_object + ] = mock_rpc request = {} await client.create_automation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15147,8 +15204,9 @@ def test_update_automation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_automation(request) @@ -15204,26 +15262,28 @@ async def test_update_automation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_automation - ] = mock_object + ] = mock_rpc request = {} await client.update_automation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15545,8 +15605,9 @@ def test_delete_automation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_automation(request) @@ -15602,26 +15663,28 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_automation - ] = mock_object + ] = mock_rpc request = {} await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15992,22 +16055,23 @@ async def test_get_automation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_automation - ] = mock_object + ] = mock_rpc request = {} await client.get_automation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16377,22 +16441,23 @@ async def test_list_automations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_automations - ] = mock_object + ] = mock_rpc request = {} await client.list_automations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_automations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16970,22 +17035,23 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_automation_run - ] = mock_object + ] = mock_rpc request = {} await client.get_automation_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_automation_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17379,22 +17445,23 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_automation_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_automation_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_automation_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17965,22 +18032,23 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_automation_run - ] = mock_object + ] = mock_rpc request = {} await client.cancel_automation_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_automation_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py index 0207eada8222..c5aefdb6f411 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DeveloperConnectClient).get_transport_class, type(DeveloperConnectClient) - ) + get_transport_class = DeveloperConnectClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py index 79f9d66110c8..fec62e124d0b 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py @@ -719,7 +719,7 @@ def __init__( Type[DeveloperConnectTransport], Callable[..., DeveloperConnectTransport], ] = ( - type(self).get_transport_class(transport) + DeveloperConnectClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DeveloperConnectTransport], transport) ) diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json index c6db9b18fbf0..6d8efa20b69c 100644 --- a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json +++ b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-developerconnect", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py index 8a5240d9f1d3..9b1557a02d43 100644 --- a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py +++ b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py @@ -1336,22 +1336,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1909,22 +1910,23 @@ async def test_get_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2235,8 +2237,9 @@ def test_create_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection(request) @@ -2292,26 +2295,28 @@ async def test_create_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2660,8 +2665,9 @@ def test_update_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection(request) @@ -2717,26 +2723,28 @@ async def test_update_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3081,8 +3089,9 @@ def test_delete_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection(request) @@ -3138,26 +3147,28 @@ async def test_delete_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3471,8 +3482,9 @@ def test_create_git_repository_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_git_repository_link(request) @@ -3528,26 +3540,28 @@ async def test_create_git_repository_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_git_repository_link - ] = mock_object + ] = mock_rpc request = {} await client.create_git_repository_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_git_repository_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3881,8 +3895,9 @@ def test_delete_git_repository_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_git_repository_link(request) @@ -3938,26 +3953,28 @@ async def test_delete_git_repository_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_git_repository_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_git_repository_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_git_repository_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4336,22 +4353,23 @@ async def test_list_git_repository_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_git_repository_links - ] = mock_object + ] = mock_rpc request = {} await client.list_git_repository_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_git_repository_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4944,22 +4962,23 @@ async def test_get_git_repository_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_git_repository_link - ] = mock_object + ] = mock_rpc request = {} await client.get_git_repository_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_git_repository_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5343,22 +5362,23 @@ async def test_fetch_read_write_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_write_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_write_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_write_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5725,22 +5745,23 @@ async def test_fetch_read_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6107,22 +6128,23 @@ async def test_fetch_linkable_git_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_linkable_git_repositories - ] = mock_object + ] = mock_rpc request = {} await client.fetch_linkable_git_repositories(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_linkable_git_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6698,22 +6720,23 @@ async def test_fetch_git_hub_installations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_hub_installations - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_hub_installations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_hub_installations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7075,22 +7098,23 @@ async def test_fetch_git_refs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_refs - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_refs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_refs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index 381e786a17cc..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index 381e786a17cc..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py index dbb350dffc92..33c3bea5ab9c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AgentsClient).get_transport_class, type(AgentsClient) - ) + get_transport_class = AgentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py index 1ec96a5a0213..4ac36a2701e7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[AgentsTransport], Callable[..., AgentsTransport] ] = ( - type(self).get_transport_class(transport) + AgentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AgentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py index 5f040c3a9a51..9ae205479eea 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnswerRecordsClient).get_transport_class, type(AnswerRecordsClient) - ) + get_transport_class = AnswerRecordsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py index 2178ad86a980..0e044e1f9841 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[AnswerRecordsTransport], Callable[..., AnswerRecordsTransport] ] = ( - type(self).get_transport_class(transport) + AnswerRecordsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnswerRecordsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py index 3808aeb30bae..50cd3a4370f4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContextsClient).get_transport_class, type(ContextsClient) - ) + get_transport_class = ContextsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py index 6c2fcd892984..f10af0fbdab1 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py @@ -661,7 +661,7 @@ def __init__( transport_init: Union[ Type[ContextsTransport], Callable[..., ContextsTransport] ] = ( - type(self).get_transport_class(transport) + ContextsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContextsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py index cf2dba2a38b2..b90d22fbe8f7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationDatasetsClient).get_transport_class, - type(ConversationDatasetsClient), - ) + get_transport_class = ConversationDatasetsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py index edabef8eb91d..1d0dd0fd7630 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py @@ -679,7 +679,7 @@ def __init__( Type[ConversationDatasetsTransport], Callable[..., ConversationDatasetsTransport], ] = ( - type(self).get_transport_class(transport) + ConversationDatasetsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationDatasetsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py index 61a981270114..0cdefdaa996b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,10 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationModelsClient).get_transport_class, - type(ConversationModelsClient), - ) + get_transport_class = ConversationModelsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py index a98c1de5efac..c0db5a9decd2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py @@ -741,7 +741,7 @@ def __init__( Type[ConversationModelsTransport], Callable[..., ConversationModelsTransport], ] = ( - type(self).get_transport_class(transport) + ConversationModelsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationModelsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py index ff80e255289d..ce4a623eb982 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -227,10 +226,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationProfilesClient).get_transport_class, - type(ConversationProfilesClient), - ) + get_transport_class = ConversationProfilesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py index 230e9ef476fb..3479c509235c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py @@ -799,7 +799,7 @@ def __init__( Type[ConversationProfilesTransport], Callable[..., ConversationProfilesTransport], ] = ( - type(self).get_transport_class(transport) + ConversationProfilesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationProfilesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py index 4e06a4fbc5c9..7c2664bd6cb2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -227,9 +226,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationsClient).get_transport_class, type(ConversationsClient) - ) + get_transport_class = ConversationsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py index 8848253ebc91..2563af797f05 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py @@ -848,7 +848,7 @@ def __init__( transport_init: Union[ Type[ConversationsTransport], Callable[..., ConversationsTransport] ] = ( - type(self).get_transport_class(transport) + ConversationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py index 91b6998e1ff9..88b4715db904 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentsClient).get_transport_class, type(DocumentsClient) - ) + get_transport_class = DocumentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py index 259398041362..7a181338d959 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py @@ -665,7 +665,7 @@ def __init__( transport_init: Union[ Type[DocumentsTransport], Callable[..., DocumentsTransport] ] = ( - type(self).get_transport_class(transport) + DocumentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/async_client.py index 8bc5a3493d2b..2d002ee347bb 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,10 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EncryptionSpecServiceClient).get_transport_class, - type(EncryptionSpecServiceClient), - ) + get_transport_class = EncryptionSpecServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py index 2bff850f197d..7c7b92bbedff 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py @@ -672,7 +672,7 @@ def __init__( Type[EncryptionSpecServiceTransport], Callable[..., EncryptionSpecServiceTransport], ] = ( - type(self).get_transport_class(transport) + EncryptionSpecServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EncryptionSpecServiceTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/async_client.py index 2dcaa9232c05..0249229a924b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EntityTypesClient).get_transport_class, type(EntityTypesClient) - ) + get_transport_class = EntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py index a80e9efa0ac7..1fd655665170 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py @@ -662,7 +662,7 @@ def __init__( transport_init: Union[ Type[EntityTypesTransport], Callable[..., EntityTypesTransport] ] = ( - type(self).get_transport_class(transport) + EntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/async_client.py index 90a381dab01c..f19398ab8620 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient) - ) + get_transport_class = EnvironmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py index aab2e06c4e0a..9d5dcfd12f75 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py @@ -692,7 +692,7 @@ def __init__( transport_init: Union[ Type[EnvironmentsTransport], Callable[..., EnvironmentsTransport] ] = ( - type(self).get_transport_class(transport) + EnvironmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EnvironmentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/async_client.py index 0872c7f3e002..9ab1513a723a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FulfillmentsClient).get_transport_class, type(FulfillmentsClient) - ) + get_transport_class = FulfillmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py index 74fecf9d7be1..40140305aed2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py @@ -654,7 +654,7 @@ def __init__( transport_init: Union[ Type[FulfillmentsTransport], Callable[..., FulfillmentsTransport] ] = ( - type(self).get_transport_class(transport) + FulfillmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FulfillmentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py index 93e8fe316460..9172b31f76a0 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GeneratorsClient).get_transport_class, type(GeneratorsClient) - ) + get_transport_class = GeneratorsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py index 2db042441c1c..6666e073c531 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py @@ -666,7 +666,7 @@ def __init__( transport_init: Union[ Type[GeneratorsTransport], Callable[..., GeneratorsTransport] ] = ( - type(self).get_transport_class(transport) + GeneratorsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GeneratorsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/async_client.py index c1197f3ebd79..0cb240fdea48 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IntentsClient).get_transport_class, type(IntentsClient) - ) + get_transport_class = IntentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py index 4ec746c13023..98b188b368c3 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py @@ -682,7 +682,7 @@ def __init__( transport_init: Union[ Type[IntentsTransport], Callable[..., IntentsTransport] ] = ( - type(self).get_transport_class(transport) + IntentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IntentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/async_client.py index eccfe1888120..8bac3d75fd4f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(KnowledgeBasesClient).get_transport_class, type(KnowledgeBasesClient) - ) + get_transport_class = KnowledgeBasesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py index d9f16be09261..27674293784e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py @@ -664,7 +664,7 @@ def __init__( transport_init: Union[ Type[KnowledgeBasesTransport], Callable[..., KnowledgeBasesTransport] ] = ( - type(self).get_transport_class(transport) + KnowledgeBasesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., KnowledgeBasesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py index 322f1f01d726..43f7da028296 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -206,9 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ParticipantsClient).get_transport_class, type(ParticipantsClient) - ) + get_transport_class = ParticipantsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py index dde7db218c4a..dcd520b9ac4e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py @@ -769,7 +769,7 @@ def __init__( transport_init: Union[ Type[ParticipantsTransport], Callable[..., ParticipantsTransport] ] = ( - type(self).get_transport_class(transport) + ParticipantsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ParticipantsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/async_client.py index 15551ec9b1ed..b72dde41b65b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionEntityTypesClient).get_transport_class, - type(SessionEntityTypesClient), - ) + get_transport_class = SessionEntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py index 132caf0c1292..64e359566314 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py @@ -675,7 +675,7 @@ def __init__( Type[SessionEntityTypesTransport], Callable[..., SessionEntityTypesTransport], ] = ( - type(self).get_transport_class(transport) + SessionEntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionEntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py index e5ac2533ab65..fd60fadf2543 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionsClient).get_transport_class, type(SessionsClient) - ) + get_transport_class = SessionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py index 42964b46f6c4..ad4dec9d8b41 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py @@ -724,7 +724,7 @@ def __init__( transport_init: Union[ Type[SessionsTransport], Callable[..., SessionsTransport] ] = ( - type(self).get_transport_class(transport) + SessionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/async_client.py index e7c796a7ffa4..d824a3ae4066 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VersionsClient).get_transport_class, type(VersionsClient) - ) + get_transport_class = VersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py index a2f67523d6be..4b9e18554034 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py @@ -658,7 +658,7 @@ def __init__( transport_init: Union[ Type[VersionsTransport], Callable[..., VersionsTransport] ] = ( - type(self).get_transport_class(transport) + VersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VersionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index 381e786a17cc..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/async_client.py index d55f16f0f54a..d82c9800c4a1 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
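The gapic_version.py hunk above resets the version string to a 0.0.0 placeholder; the trailing {x-release-please-version} annotation is what lets release tooling rewrite that line at release time. A hedged illustration of that substitution step (plain re, not release-please itself; the target version is simply the one removed above):

import re

line = '__version__ = "0.0.0"  # {x-release-please-version}'
released = re.sub(
    r'"\d+\.\d+\.\d+"(?=.*\{x-release-please-version\})',
    '"2.31.0"',
    line,
)
assert released == '__version__ = "2.31.0"  # {x-release-please-version}'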
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AgentsClient).get_transport_class, type(AgentsClient) - ) + get_transport_class = AgentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py index d8a9807c8028..9ffafc265da8 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py @@ -659,7 +659,7 @@ def __init__( transport_init: Union[ Type[AgentsTransport], Callable[..., AgentsTransport] ] = ( - type(self).get_transport_class(transport) + AgentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AgentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py index c5c2c35f8c92..ad915809ff39 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnswerRecordsClient).get_transport_class, type(AnswerRecordsClient) - ) + get_transport_class = AnswerRecordsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py index 7557b272e959..dc31fd879da9 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py @@ -723,7 +723,7 @@ def __init__( transport_init: Union[ Type[AnswerRecordsTransport], Callable[..., AnswerRecordsTransport] ] = ( - type(self).get_transport_class(transport) + AnswerRecordsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnswerRecordsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/async_client.py index 237a34c8ae78..fbb21e77d5f4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContextsClient).get_transport_class, type(ContextsClient) - ) + get_transport_class = ContextsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py index 74e54e09968e..f268cfb02053 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[ContextsTransport], Callable[..., ContextsTransport] ] = ( - type(self).get_transport_class(transport) + ContextsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContextsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/async_client.py index 292e3de13474..5876b7c20a69 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -227,10 +226,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationProfilesClient).get_transport_class, - type(ConversationProfilesClient), - ) + get_transport_class = ConversationProfilesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py index 4b5890723da3..45d0d87e9a2e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py @@ -799,7 +799,7 @@ def __init__( Type[ConversationProfilesTransport], Callable[..., ConversationProfilesTransport], ] = ( - type(self).get_transport_class(transport) + ConversationProfilesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationProfilesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py index 0e533b89d029..196bffbb80c9 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -227,9 +226,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationsClient).get_transport_class, type(ConversationsClient) - ) + get_transport_class = ConversationsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py index 93bcab01f2ed..e42df6512a61 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py @@ -848,7 +848,7 @@ def __init__( transport_init: Union[ Type[ConversationsTransport], Callable[..., ConversationsTransport] ] = ( - type(self).get_transport_class(transport) + ConversationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/async_client.py index 03c6cf0604f4..bb6bd0f7c62e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentsClient).get_transport_class, type(DocumentsClient) - ) + get_transport_class = DocumentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py index 906964d86d20..d35946aa5f26 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py @@ -666,7 +666,7 @@ def __init__( transport_init: Union[ Type[DocumentsTransport], Callable[..., DocumentsTransport] ] = ( - type(self).get_transport_class(transport) + DocumentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/async_client.py index 756ff282279a..c99e44a544b6 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,10 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EncryptionSpecServiceClient).get_transport_class, - type(EncryptionSpecServiceClient), - ) + get_transport_class = EncryptionSpecServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py index 976e51e05538..b84dd3f7542b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py @@ -672,7 +672,7 @@ def __init__( Type[EncryptionSpecServiceTransport], Callable[..., EncryptionSpecServiceTransport], ] = ( - type(self).get_transport_class(transport) + EncryptionSpecServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EncryptionSpecServiceTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/async_client.py index 0c98ac551632..277817ddb451 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EntityTypesClient).get_transport_class, type(EntityTypesClient) - ) + get_transport_class = EntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py index 59b51a40ce35..412c1e9b579e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[EntityTypesTransport], Callable[..., EntityTypesTransport] ] = ( - type(self).get_transport_class(transport) + EntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/async_client.py index f8b44eadc48e..9a6dd14ea116 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient) - ) + get_transport_class = EnvironmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py index bf43541baa2a..7595679ac339 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py @@ -692,7 +692,7 @@ def __init__( transport_init: Union[ Type[EnvironmentsTransport], Callable[..., EnvironmentsTransport] ] = ( - type(self).get_transport_class(transport) + EnvironmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EnvironmentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/async_client.py index 87b287df7de6..c8e784951dc1 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FulfillmentsClient).get_transport_class, type(FulfillmentsClient) - ) + get_transport_class = FulfillmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py index 75f09cda6f9d..070372217861 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py @@ -654,7 +654,7 @@ def __init__( transport_init: Union[ Type[FulfillmentsTransport], Callable[..., FulfillmentsTransport] ] = ( - type(self).get_transport_class(transport) + FulfillmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FulfillmentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py index 937df96f69a4..37a2d1a30098 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GeneratorsClient).get_transport_class, type(GeneratorsClient) - ) + get_transport_class = GeneratorsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py index a1d8bf46095d..be484ebaf96b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py @@ -666,7 +666,7 @@ def __init__( transport_init: Union[ Type[GeneratorsTransport], Callable[..., GeneratorsTransport] ] = ( - type(self).get_transport_class(transport) + GeneratorsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GeneratorsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/async_client.py index 9165652f2262..48854d736887 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IntentsClient).get_transport_class, type(IntentsClient) - ) + get_transport_class = IntentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py index 2652e47e0163..7fe4ff52e83e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py @@ -684,7 +684,7 @@ def __init__( transport_init: Union[ Type[IntentsTransport], Callable[..., IntentsTransport] ] = ( - type(self).get_transport_class(transport) + IntentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IntentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/async_client.py index d7a17b8760a7..f93742c2890f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(KnowledgeBasesClient).get_transport_class, type(KnowledgeBasesClient) - ) + get_transport_class = KnowledgeBasesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py index 89965cce63b6..17177350b6d5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py @@ -664,7 +664,7 @@ def __init__( transport_init: Union[ Type[KnowledgeBasesTransport], Callable[..., KnowledgeBasesTransport] ] = ( - type(self).get_transport_class(transport) + KnowledgeBasesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., KnowledgeBasesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py index 3191c0cca6cd..8da68c473a10 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ParticipantsClient).get_transport_class, type(ParticipantsClient) - ) + get_transport_class = ParticipantsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py index ad5439c06211..b708febe7fea 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py @@ -791,7 +791,7 @@ def __init__( transport_init: Union[ Type[ParticipantsTransport], Callable[..., ParticipantsTransport] ] = ( - type(self).get_transport_class(transport) + ParticipantsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ParticipantsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/async_client.py index fdcc49e7a5ae..80bffdb075b7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionEntityTypesClient).get_transport_class, - type(SessionEntityTypesClient), - ) + get_transport_class = SessionEntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py index 66ce4194ec24..f21bf2f9ae54 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py @@ -675,7 +675,7 @@ def __init__( Type[SessionEntityTypesTransport], Callable[..., SessionEntityTypesTransport], ] = ( - type(self).get_transport_class(transport) + SessionEntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionEntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/async_client.py index 913fc411ac4f..d79f7ef375f4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionsClient).get_transport_class, type(SessionsClient) - ) + get_transport_class = SessionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py index d83d5687db20..9c00ec438a0d 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py @@ -746,7 +746,7 @@ def __init__( transport_init: Union[ Type[SessionsTransport], Callable[..., SessionsTransport] ] = ( - type(self).get_transport_class(transport) + SessionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/async_client.py index 34dcc5d2f732..ac3cc8e82b42 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VersionsClient).get_transport_class, type(VersionsClient) - ) + get_transport_class = VersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py index 1fc81063fba4..389696069683 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[VersionsTransport], Callable[..., VersionsTransport] ] = ( - type(self).get_transport_class(transport) + VersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VersionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json index a8227254967f..dde14d384e60 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.31.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json index 5a3b5e91850d..58a96bc185e8 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.31.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py index 5ec802b15c96..ff15e39beb3b 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py @@ -1265,22 +1265,23 @@ async def test_get_agent_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_agent - ] = mock_object + ] = mock_rpc request = {} await client.get_agent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1678,22 +1679,23 @@ async def test_set_agent_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_agent - ] = mock_object + ] = mock_rpc request = {} await client.set_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2055,22 +2057,23 @@ async def test_delete_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_agent - ] = mock_object + ] = mock_rpc request = {} await client.delete_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2412,22 +2415,23 @@ async def test_search_agents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_agents - ] = mock_object + ] = mock_rpc request = {} await client.search_agents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2914,8 +2918,9 @@ def test_train_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_agent(request) @@ -2969,26 +2974,28 @@ async def test_train_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_agent - ] = mock_object + ] = mock_rpc request = {} await client.train_agent(request) # Establish that the underlying gRPC stub method was called. 
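The test hunks above rename mock_object to mock_rpc and pin its awaited result to a plain mock.Mock() before counting calls through the transport's wrapped-method cache. A self-contained sketch of that pattern, with FakeTransport and FakeAsyncClient as toy stand-ins for the generated Dialogflow client (assumptions, not the real classes):

import asyncio
from unittest import mock


class FakeTransport:
    """Toy transport with a wrapped-method cache keyed by the stub method."""

    def __init__(self):
        async def get_agent(request):
            return {"name": "projects/-/agent"}

        self.get_agent = get_agent
        self._wrapped_methods = {self.get_agent: self.get_agent}


class FakeAsyncClient:
    """Toy async client that always dispatches through the cached wrapper."""

    def __init__(self):
        self._transport = FakeTransport()

    async def get_agent(self, request):
        rpc = self._transport._wrapped_methods[self._transport.get_agent]
        return await rpc(request)


async def main():
    client = FakeAsyncClient()

    # As in the updated tests: swap the cached wrapper for an AsyncMock whose
    # awaited result is a plain, non-awaitable Mock, then count the calls.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    client._transport._wrapped_methods[client._transport.get_agent] = mock_rpc

    await client.get_agent({})
    assert mock_rpc.call_count == 1
    await client.get_agent({})
    assert mock_rpc.call_count == 2


asyncio.run(main())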
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3280,8 +3287,9 @@ def test_export_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_agent(request) @@ -3335,26 +3343,28 @@ async def test_export_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_agent - ] = mock_object + ] = mock_rpc request = {} await client.export_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3646,8 +3656,9 @@ def test_import_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_agent(request) @@ -3701,26 +3712,28 @@ async def test_import_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_agent - ] = mock_object + ] = mock_rpc request = {} await client.import_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3930,8 +3943,9 @@ def test_restore_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_agent(request) @@ -3985,26 +3999,28 @@ async def test_restore_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_agent - ] = mock_object + ] = mock_rpc request = {} await client.restore_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4278,22 +4294,23 @@ async def test_get_validation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_validation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_validation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_validation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py index 7673cc5928ac..2299b641c386 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py @@ -1303,22 +1303,23 @@ async def test_list_answer_records_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_answer_records - ] = mock_object + ] = mock_rpc request = {} await client.list_answer_records(request) # Establish that the underlying gRPC stub method was called. 
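The reworded comments above describe lazy caching: wrapper_fn builds the client._transport.operations_client helper on the first long-running-operation call, and every later call reuses that cached instance, which is why the tests reset wrapper_fn and then assert it is not called again. A generic build-once sketch of that behaviour (LazyOperationsHolder is a made-up name, not the generated class):

class LazyOperationsHolder:
    """Cache whatever wrapper_fn builds on first use; reuse it afterwards."""

    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    def operations_client(self):
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


built = []
holder = LazyOperationsHolder(lambda: built.append("ops") or object())
first = holder.operations_client()
second = holder.operations_client()
assert first is second
assert built == ["ops"]  # wrapper_fn ran exactly once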
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_answer_records(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1887,22 +1888,23 @@ async def test_update_answer_record_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_answer_record - ] = mock_object + ] = mock_rpc request = {} await client.update_answer_record(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_answer_record(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py index bdb47ebbc7a3..112e1ad6b343 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py @@ -1235,22 +1235,23 @@ async def test_list_contexts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_contexts - ] = mock_object + ] = mock_rpc request = {} await client.list_contexts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_contexts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1796,22 +1797,23 @@ async def test_get_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_context - ] = mock_object + ] = mock_rpc request = {} await client.get_context(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2161,22 +2163,23 @@ async def test_create_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_context - ] = mock_object + ] = mock_rpc request = {} await client.create_context(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2532,22 +2535,23 @@ async def test_update_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_context - ] = mock_object + ] = mock_rpc request = {} await client.update_context(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2897,22 +2901,23 @@ async def test_delete_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_context - ] = mock_object + ] = mock_rpc request = {} await client.delete_context(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3257,22 +3262,23 @@ async def test_delete_all_contexts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_all_contexts - ] = mock_object + ] = mock_rpc request = {} await client.delete_all_contexts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_all_contexts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py index 0b157178cbbe..e5c420409ba5 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py @@ -1324,8 +1324,9 @@ def test_create_conversation_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_conversation_dataset(request) @@ -1381,26 +1382,28 @@ async def test_create_conversation_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_conversation_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1797,22 +1800,23 @@ async def test_get_conversation_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2193,22 +2197,23 @@ async def test_list_conversation_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversation_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_conversation_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversation_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2731,8 +2736,9 @@ def test_delete_conversation_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_conversation_dataset(request) @@ -2788,26 +2794,28 @@ async def test_delete_conversation_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_conversation_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3119,8 +3127,9 @@ def test_import_conversation_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_conversation_data(request) @@ -3176,26 +3185,28 @@ async def test_import_conversation_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_conversation_data - ] = mock_object + ] = mock_rpc request = {} await client.import_conversation_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_conversation_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py index 82c346d47a7a..cd7615c81d65 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py @@ -1297,8 +1297,9 @@ def test_create_conversation_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_conversation_model(request) @@ -1354,26 +1355,28 @@ async def test_create_conversation_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation_model - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_conversation_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1770,22 +1773,23 @@ async def test_get_conversation_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation_model - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2166,22 +2170,23 @@ async def test_list_conversation_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversation_models - ] = mock_object + ] = mock_rpc request = {} await client.list_conversation_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversation_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2702,8 +2707,9 @@ def test_delete_conversation_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_conversation_model(request) @@ -2759,26 +2765,28 @@ async def test_delete_conversation_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_conversation_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3090,8 +3098,9 @@ def test_deploy_conversation_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_conversation_model(request) @@ -3147,26 +3156,28 @@ async def test_deploy_conversation_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_conversation_model - ] = mock_object + ] = mock_rpc request = {} await client.deploy_conversation_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_conversation_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3392,8 +3403,9 @@ def test_undeploy_conversation_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_conversation_model(request) @@ -3449,26 +3461,28 @@ async def test_undeploy_conversation_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_conversation_model - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_conversation_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_conversation_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3758,22 +3772,23 @@ async def test_get_conversation_model_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation_model_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation_model_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation_model_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4167,23 @@ async def test_list_conversation_model_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversation_model_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversation_model_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversation_model_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4698,8 +4714,9 @@ def test_create_conversation_model_evaluation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_conversation_model_evaluation(request) @@ -4755,26 +4772,28 @@ async def test_create_conversation_model_evaluation_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation_model_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation_model_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_conversation_model_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py index 1809ddb9b9eb..73647d1e5e2a 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py @@ -1386,22 +1386,23 @@ async def test_list_conversation_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversation_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_conversation_profiles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversation_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1994,22 +1995,23 @@ async def test_get_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2402,22 +2404,23 @@ async def test_create_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2824,22 +2827,23 @@ async def test_update_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.update_conversation_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3231,22 +3235,23 @@ async def test_delete_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,8 +3557,9 @@ def test_set_suggestion_feature_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_suggestion_feature_config(request) @@ -3609,26 +3615,28 @@ async def test_set_suggestion_feature_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_suggestion_feature_config - ] = mock_object + ] = mock_rpc request = {} await client.set_suggestion_feature_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_suggestion_feature_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3984,8 +3992,9 @@ def test_clear_suggestion_feature_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.clear_suggestion_feature_config(request) @@ -4041,26 +4050,28 @@ async def test_clear_suggestion_feature_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.clear_suggestion_feature_config - ] = mock_object + ] = mock_rpc request = {} await client.clear_suggestion_feature_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.clear_suggestion_feature_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py index eaf18da8127b..54a9c5857d22 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py @@ -1322,22 +1322,23 @@ async def test_create_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1735,22 +1736,23 @@ async def test_list_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2327,22 +2329,23 @@ async def test_get_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2729,22 +2732,23 @@ async def test_complete_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_conversation - ] = mock_object + ] = mock_rpc request = {} await client.complete_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3119,22 +3123,23 @@ async def test_list_messages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_messages - ] = mock_object + ] = mock_rpc request = {} await client.list_messages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3695,22 +3700,23 @@ async def test_suggest_conversation_summary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_conversation_summary - ] = mock_object + ] = mock_rpc request = {} await client.suggest_conversation_summary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_conversation_summary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4088,22 +4094,23 @@ async def test_generate_stateless_summary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_stateless_summary - ] = mock_object + ] = mock_rpc request = {} await client.generate_stateless_summary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_stateless_summary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4389,22 +4396,23 @@ async def test_generate_stateless_suggestion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_stateless_suggestion - ] = mock_object + ] = mock_rpc request = {} await client.generate_stateless_suggestion(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_stateless_suggestion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4685,22 +4693,23 @@ async def test_search_knowledge_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_knowledge - ] = mock_object + ] = mock_rpc request = {} await client.search_knowledge(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_knowledge(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py index c261e5fe2fd7..d96e9815c0cb 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py @@ -1261,22 +1261,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1835,22 +1836,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2149,8 +2151,9 @@ def test_create_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_document(request) @@ -2204,26 +2207,28 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2525,8 +2530,9 @@ def test_import_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_documents(request) @@ -2580,26 +2586,28 @@ async def test_import_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_documents - ] = mock_object + ] = mock_rpc request = {} await client.import_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2807,8 +2815,9 @@ def test_delete_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_document(request) @@ -2862,26 +2871,28 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3167,8 +3178,9 @@ def test_update_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_document(request) @@ -3222,26 +3234,28 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3543,8 +3557,9 @@ def test_reload_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reload_document(request) @@ -3598,26 +3613,28 @@ async def test_reload_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reload_document - ] = mock_object + ] = mock_rpc request = {} await client.reload_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reload_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3913,8 +3930,9 @@ def test_export_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_document(request) @@ -3968,26 +3986,28 @@ async def test_export_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_document - ] = mock_object + ] = mock_rpc request = {} await client.export_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py index 06a51defa581..7afb6d4a8a43 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py @@ -1380,22 +1380,23 @@ async def test_get_encryption_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_encryption_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1708,8 +1709,9 @@ def test_initialize_encryption_spec_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.initialize_encryption_spec(request) @@ -1765,26 +1767,28 @@ async def test_initialize_encryption_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.initialize_encryption_spec - ] = mock_object + ] = mock_rpc request = {} await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py index e1e55267fb83..f3969a436e58 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py @@ -1276,22 +1276,23 @@ async def test_list_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1879,22 +1880,23 @@ async def test_get_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2293,22 +2295,23 @@ async def test_create_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2726,22 +2729,23 @@ async def test_update_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3127,22 +3131,23 @@ async def test_delete_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3451,8 +3456,9 @@ def test_batch_update_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_entity_types(request) @@ -3508,26 +3514,28 @@ async def test_batch_update_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3753,8 +3761,9 @@ def test_batch_delete_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_entity_types(request) @@ -3810,26 +3819,28 @@ async def test_batch_delete_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4153,8 +4164,9 @@ def test_batch_create_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_entities(request) @@ -4210,26 +4222,28 @@ async def test_batch_create_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_entities - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4562,8 +4576,9 @@ def test_batch_update_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_entities(request) @@ -4619,26 +4634,28 @@ async def test_batch_update_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_entities - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4971,8 +4988,9 @@ def test_batch_delete_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_entities(request) @@ -5028,26 +5046,28 @@ async def test_batch_delete_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_entities - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py index 631a04a2b7e3..d0a1c490eef1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py @@ -1277,22 +1277,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1862,22 +1863,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2173,22 +2175,23 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2484,22 +2487,23 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2783,22 +2787,23 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3082,22 +3087,23 @@ async def test_get_environment_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment_history - ] = mock_object + ] = mock_rpc request = {} await client.get_environment_history(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py index f08bb4893b96..4f2daa6bc14e 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py @@ -1270,22 +1270,23 @@ async def test_get_fulfillment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_fulfillment - ] = mock_object + ] = mock_rpc request = {} await client.get_fulfillment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_fulfillment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1653,23 @@ async def test_update_fulfillment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_fulfillment - ] = mock_object + ] = mock_rpc request = {} await client.update_fulfillment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_fulfillment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py index 5f0ca8129953..63789784bbb0 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py @@ -1258,22 +1258,23 @@ async def test_create_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_generator - ] = mock_object + ] = mock_rpc request = {} await client.create_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1653,23 @@ async def test_get_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_generator - ] = mock_object + ] = mock_rpc request = {} await client.get_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2018,22 +2020,23 @@ async def test_list_generators_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_generators - ] = mock_object + ] = mock_rpc request = {} await client.list_generators(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_generators(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2571,22 +2574,23 @@ async def test_delete_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_generator - ] = mock_object + ] = mock_rpc request = {} await client.delete_generator(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2930,22 +2934,23 @@ async def test_update_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_generator - ] = mock_object + ] = mock_rpc request = {} await client.update_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py index 3d81b3b993cd..228399538e41 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py @@ -1237,22 +1237,23 @@ async def test_list_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_intents - ] = mock_object + ] = mock_rpc request = {} await client.list_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1849,22 +1850,23 @@ async def test_get_intent_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_intent - ] = mock_object + ] = mock_rpc request = {} await client.get_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2299,22 +2301,23 @@ async def test_create_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_intent - ] = mock_object + ] = mock_rpc request = {} await client.create_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2761,22 +2764,23 @@ async def test_update_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_intent - ] = mock_object + ] = mock_rpc request = {} await client.update_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3168,22 +3172,23 @@ async def test_delete_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_intent - ] = mock_object + ] = mock_rpc request = {} await client.delete_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3481,8 +3486,9 @@ def test_batch_update_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_intents(request) @@ -3538,26 +3544,28 @@ async def test_batch_update_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_intents - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3889,8 +3897,9 @@ def test_batch_delete_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_intents(request) @@ -3946,26 +3955,28 @@ async def test_batch_delete_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_intents - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py index 42af2e149589..5a22d56e91f5 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py @@ -1304,22 +1304,23 @@ async def test_list_knowledge_bases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_knowledge_bases - ] = mock_object + ] = mock_rpc request = {} await client.list_knowledge_bases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_knowledge_bases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1899,22 +1900,23 @@ async def test_get_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.get_knowledge_base(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2296,22 +2298,23 @@ async def test_create_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.create_knowledge_base(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2691,22 +2694,23 @@ async def test_delete_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.delete_knowledge_base(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3072,22 +3076,23 @@ async def test_update_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.update_knowledge_base(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py index e7f4ad3f84da..7eb9548154ab 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py @@ -1290,22 +1290,23 @@ async def test_create_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_participant - ] = mock_object + ] = mock_rpc request = {} await client.create_participant(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_get_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant - ] = mock_object + ] = mock_rpc request = {} await client.get_participant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2072,22 +2074,23 @@ async def test_list_participants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participants - ] = mock_object + ] = mock_rpc request = {} await client.list_participants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2665,22 +2668,23 @@ async def test_update_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_participant - ] = mock_object + ] = mock_rpc request = {} await client.update_participant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3058,22 +3062,23 @@ async def test_analyze_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_content - ] = mock_object + ] = mock_rpc request = {} await client.analyze_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3367,22 +3372,23 @@ async def test_streaming_analyze_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_analyze_content - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_analyze_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_analyze_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3597,22 +3603,23 @@ async def test_suggest_articles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_articles - ] = mock_object + ] = mock_rpc request = {} await client.suggest_articles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_articles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3980,22 +3987,23 @@ async def test_suggest_faq_answers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_faq_answers - ] = mock_object + ] = mock_rpc request = {} await client.suggest_faq_answers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_faq_answers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4374,22 +4382,23 @@ async def test_suggest_smart_replies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_smart_replies - ] = mock_object + ] = mock_rpc request = {} await client.suggest_smart_replies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_smart_replies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4770,22 +4779,23 @@ async def test_suggest_knowledge_assist_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_knowledge_assist - ] = mock_object + ] = mock_rpc request = {} await client.suggest_knowledge_assist(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_knowledge_assist(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py index 6d6a689e75f6..909edb7a43cd 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py @@ -1348,22 +1348,23 @@ async def test_list_session_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_session_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_session_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_session_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1950,22 +1951,23 @@ async def test_get_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_session_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2349,22 +2351,23 @@ async def test_create_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2762,22 +2765,23 @@ async def test_update_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3166,22 +3170,23 @@ async def test_delete_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_session_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py index 2a94decd825d..061184fa9c33 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py @@ -1239,22 +1239,23 @@ async def test_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detect_intent - ] = mock_object + ] = mock_rpc request = {} await client.detect_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1574,22 +1575,23 @@ async def test_streaming_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_detect_intent - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_detect_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py index 4bf4aae486d6..a0e1fce02836 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py @@ -1235,22 +1235,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1802,22 +1803,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2177,22 +2179,23 @@ async def test_create_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_version - ] = mock_object + ] = mock_rpc request = {} await client.create_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2558,22 +2561,23 @@ async def test_update_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_version - ] = mock_object + ] = mock_rpc request = {} await client.update_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2927,22 +2931,23 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py index 8fa47f4c27b7..67bc3c6462b8 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py @@ -1265,22 +1265,23 @@ async def test_get_agent_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_agent - ] = mock_object + ] = mock_rpc request = {} await client.get_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1678,22 +1679,23 @@ async def test_set_agent_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_agent - ] = mock_object + ] = mock_rpc request = {} await client.set_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2055,22 +2057,23 @@ async def test_delete_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_agent - ] = mock_object + ] = mock_rpc request = {} await client.delete_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2412,22 +2415,23 @@ async def test_search_agents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_agents - ] = mock_object + ] = mock_rpc request = {} await client.search_agents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2914,8 +2918,9 @@ def test_train_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_agent(request) @@ -2969,26 +2974,28 @@ async def test_train_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_agent - ] = mock_object + ] = mock_rpc request = {} await client.train_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3280,8 +3287,9 @@ def test_export_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_agent(request) @@ -3335,26 +3343,28 @@ async def test_export_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_agent - ] = mock_object + ] = mock_rpc request = {} await client.export_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3646,8 +3656,9 @@ def test_import_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_agent(request) @@ -3701,26 +3712,28 @@ async def test_import_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_agent - ] = mock_object + ] = mock_rpc request = {} await client.import_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3930,8 +3943,9 @@ def test_restore_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_agent(request) @@ -3985,26 +3999,28 @@ async def test_restore_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_agent - ] = mock_object + ] = mock_rpc request = {} await client.restore_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4278,22 +4294,23 @@ async def test_get_validation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_validation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_validation_result(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_validation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py index 616b0be3caf2..f71822c06c02 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py @@ -1297,22 +1297,23 @@ async def test_get_answer_record_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_answer_record - ] = mock_object + ] = mock_rpc request = {} await client.get_answer_record(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_answer_record(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1601,22 +1602,23 @@ async def test_list_answer_records_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_answer_records - ] = mock_object + ] = mock_rpc request = {} await client.list_answer_records(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_answer_records(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2185,22 +2187,23 @@ async def test_update_answer_record_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_answer_record - ] = mock_object + ] = mock_rpc request = {} await client.update_answer_record(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_answer_record(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py index 981363638145..cd78c5d1a839 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py @@ -1235,22 +1235,23 @@ async def test_list_contexts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_contexts - ] = mock_object + ] = mock_rpc request = {} await client.list_contexts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_contexts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1796,22 +1797,23 @@ async def test_get_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_context - ] = mock_object + ] = mock_rpc request = {} await client.get_context(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2161,22 +2163,23 @@ async def test_create_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_context - ] = mock_object + ] = mock_rpc request = {} await client.create_context(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2532,22 +2535,23 @@ async def test_update_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_context - ] = mock_object + ] = mock_rpc request = {} await client.update_context(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2897,22 +2901,23 @@ async def test_delete_context_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_context - ] = mock_object + ] = mock_rpc request = {} await client.delete_context(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_context(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3257,22 +3262,23 @@ async def test_delete_all_contexts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_all_contexts - ] = mock_object + ] = mock_rpc request = {} await client.delete_all_contexts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_all_contexts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py index fa783c9e6319..8a42c95cc7e4 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py @@ -1386,22 +1386,23 @@ async def test_list_conversation_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversation_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_conversation_profiles(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversation_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1994,22 +1995,23 @@ async def test_get_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2402,22 +2404,23 @@ async def test_create_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2824,22 +2827,23 @@ async def test_update_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.update_conversation_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3231,22 +3235,23 @@ async def test_delete_conversation_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversation_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,8 +3557,9 @@ def test_set_suggestion_feature_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_suggestion_feature_config(request) @@ -3609,26 +3615,28 @@ async def test_set_suggestion_feature_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_suggestion_feature_config - ] = mock_object + ] = mock_rpc request = {} await client.set_suggestion_feature_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_suggestion_feature_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3988,8 +3996,9 @@ def test_clear_suggestion_feature_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.clear_suggestion_feature_config(request) @@ -4045,26 +4054,28 @@ async def test_clear_suggestion_feature_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.clear_suggestion_feature_config - ] = mock_object + ] = mock_rpc request = {} await client.clear_suggestion_feature_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.clear_suggestion_feature_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py index 7168dd831f56..5bc69c8879cb 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py @@ -1322,22 +1322,23 @@ async def test_create_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1735,22 +1736,23 @@ async def test_list_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2327,22 +2329,23 @@ async def test_get_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2729,22 +2732,23 @@ async def test_complete_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_conversation - ] = mock_object + ] = mock_rpc request = {} await client.complete_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3123,22 +3127,23 @@ async def test_batch_create_messages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_messages - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_messages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3509,22 +3514,23 @@ async def test_list_messages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_messages - ] = mock_object + ] = mock_rpc request = {} await client.list_messages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4085,22 +4091,23 @@ async def test_suggest_conversation_summary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_conversation_summary - ] = mock_object + ] = mock_rpc request = {} await client.suggest_conversation_summary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_conversation_summary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4478,22 +4485,23 @@ async def test_generate_stateless_summary_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_stateless_summary - ] = mock_object + ] = mock_rpc request = {} await client.generate_stateless_summary(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_stateless_summary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4779,22 +4787,23 @@ async def test_generate_stateless_suggestion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_stateless_suggestion - ] = mock_object + ] = mock_rpc request = {} await client.generate_stateless_suggestion(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_stateless_suggestion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5075,22 +5084,23 @@ async def test_search_knowledge_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_knowledge - ] = mock_object + ] = mock_rpc request = {} await client.search_knowledge(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_knowledge(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py index e2b4c2fcdf6e..708aa47d22c2 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py @@ -1261,22 +1261,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1835,22 +1836,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2149,8 +2151,9 @@ def test_create_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_document(request) @@ -2204,26 +2207,28 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2525,8 +2530,9 @@ def test_import_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_documents(request) @@ -2580,26 +2586,28 @@ async def test_import_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_documents - ] = mock_object + ] = mock_rpc request = {} await client.import_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2807,8 +2815,9 @@ def test_delete_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_document(request) @@ -2862,26 +2871,28 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3167,8 +3178,9 @@ def test_update_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_document(request) @@ -3222,26 +3234,28 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3541,8 +3555,9 @@ def test_reload_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reload_document(request) @@ -3596,26 +3611,28 @@ async def test_reload_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reload_document - ] = mock_object + ] = mock_rpc request = {} await client.reload_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reload_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py index 450735897289..3bed433cd19a 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py @@ -1380,22 +1380,23 @@ async def test_get_encryption_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_encryption_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1708,8 +1709,9 @@ def test_initialize_encryption_spec_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.initialize_encryption_spec(request) @@ -1765,26 +1767,28 @@ async def test_initialize_encryption_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.initialize_encryption_spec - ] = mock_object + ] = mock_rpc request = {} await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py index e2c6dcc9cb3d..6340f829f0db 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py @@ -1276,22 +1276,23 @@ async def test_list_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1879,22 +1880,23 @@ async def test_get_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2293,22 +2295,23 @@ async def test_create_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2726,22 +2729,23 @@ async def test_update_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,22 +3141,23 @@ async def test_delete_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3461,8 +3466,9 @@ def test_batch_update_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_entity_types(request) @@ -3518,26 +3524,28 @@ async def test_batch_update_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3763,8 +3771,9 @@ def test_batch_delete_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_entity_types(request) @@ -3820,26 +3829,28 @@ async def test_batch_delete_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4163,8 +4174,9 @@ def test_batch_create_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_entities(request) @@ -4220,26 +4232,28 @@ async def test_batch_create_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_entities - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4572,8 +4586,9 @@ def test_batch_update_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_entities(request) @@ -4629,26 +4644,28 @@ async def test_batch_update_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_entities - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4981,8 +4998,9 @@ def test_batch_delete_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_entities(request) @@ -5038,26 +5056,28 @@ async def test_batch_delete_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_entities - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py index d2290951e90d..6da09cd59963 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py @@ -1277,22 +1277,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1862,22 +1863,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2173,22 +2175,23 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2484,22 +2487,23 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2783,22 +2787,23 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3082,22 +3087,23 @@ async def test_get_environment_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment_history - ] = mock_object + ] = mock_rpc request = {} await client.get_environment_history(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py index d0cf632a387e..40cdb598bcc3 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py @@ -1270,22 +1270,23 @@ async def test_get_fulfillment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_fulfillment - ] = mock_object + ] = mock_rpc request = {} await client.get_fulfillment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_fulfillment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1653,23 @@ async def test_update_fulfillment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_fulfillment - ] = mock_object + ] = mock_rpc request = {} await client.update_fulfillment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_fulfillment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py index 51500df7bbfb..912ebb9ede1f 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py @@ -1258,22 +1258,23 @@ async def test_create_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_generator - ] = mock_object + ] = mock_rpc request = {} await client.create_generator(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1653,23 @@ async def test_get_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_generator - ] = mock_object + ] = mock_rpc request = {} await client.get_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2018,22 +2020,23 @@ async def test_list_generators_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_generators - ] = mock_object + ] = mock_rpc request = {} await client.list_generators(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_generators(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2571,22 +2574,23 @@ async def test_delete_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_generator - ] = mock_object + ] = mock_rpc request = {} await client.delete_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2930,22 +2934,23 @@ async def test_update_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_generator - ] = mock_object + ] = mock_rpc request = {} await client.update_generator(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py index 10923b547c16..3e00e6b28ee1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py @@ -1237,22 +1237,23 @@ async def test_list_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_intents - ] = mock_object + ] = mock_rpc request = {} await client.list_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1852,22 +1853,23 @@ async def test_get_intent_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_intent - ] = mock_object + ] = mock_rpc request = {} await client.get_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2307,22 +2309,23 @@ async def test_create_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_intent - ] = mock_object + ] = mock_rpc request = {} await client.create_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2774,22 +2777,23 @@ async def test_update_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_intent - ] = mock_object + ] = mock_rpc request = {} await client.update_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3183,22 +3187,23 @@ async def test_delete_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_intent - ] = mock_object + ] = mock_rpc request = {} await client.delete_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3496,8 +3501,9 @@ def test_batch_update_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_update_intents(request) @@ -3553,26 +3559,28 @@ async def test_batch_update_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_intents - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_update_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3904,8 +3912,9 @@ def test_batch_delete_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_intents(request) @@ -3961,26 +3970,28 @@ async def test_batch_delete_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_intents - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py index d4e074c796f2..05e5ca892130 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py @@ -1304,22 +1304,23 @@ async def test_list_knowledge_bases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_knowledge_bases - ] = mock_object + ] = mock_rpc request = {} await client.list_knowledge_bases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_knowledge_bases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1899,22 +1900,23 @@ async def test_get_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.get_knowledge_base(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2296,22 +2298,23 @@ async def test_create_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.create_knowledge_base(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2691,22 +2694,23 @@ async def test_delete_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.delete_knowledge_base(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3072,22 +3076,23 @@ async def test_update_knowledge_base_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_knowledge_base - ] = mock_object + ] = mock_rpc request = {} await client.update_knowledge_base(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_knowledge_base(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py index 28217c9d885d..0a43b860baa3 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py @@ -1294,22 +1294,23 @@ async def test_create_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_participant - ] = mock_object + ] = mock_rpc request = {} await client.create_participant(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1689,22 +1690,23 @@ async def test_get_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant - ] = mock_object + ] = mock_rpc request = {} await client.get_participant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2069,22 +2071,23 @@ async def test_list_participants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participants - ] = mock_object + ] = mock_rpc request = {} await client.list_participants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2659,22 +2662,23 @@ async def test_update_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_participant - ] = mock_object + ] = mock_rpc request = {} await client.update_participant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3052,22 +3056,23 @@ async def test_analyze_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_content - ] = mock_object + ] = mock_rpc request = {} await client.analyze_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3381,22 +3386,23 @@ async def test_streaming_analyze_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_analyze_content - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_analyze_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_analyze_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3611,22 +3617,23 @@ async def test_suggest_articles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_articles - ] = mock_object + ] = mock_rpc request = {} await client.suggest_articles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_articles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3994,22 +4001,23 @@ async def test_suggest_faq_answers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_faq_answers - ] = mock_object + ] = mock_rpc request = {} await client.suggest_faq_answers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_faq_answers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4388,22 +4396,23 @@ async def test_suggest_smart_replies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_smart_replies - ] = mock_object + ] = mock_rpc request = {} await client.suggest_smart_replies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_smart_replies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4784,22 +4793,23 @@ async def test_suggest_knowledge_assist_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_knowledge_assist - ] = mock_object + ] = mock_rpc request = {} await client.suggest_knowledge_assist(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_knowledge_assist(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5081,22 +5091,23 @@ async def test_list_suggestions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_suggestions - ] = mock_object + ] = mock_rpc request = {} await client.list_suggestions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_suggestions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5574,22 +5585,23 @@ async def test_compile_suggestion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compile_suggestion - ] = mock_object + ] = mock_rpc request = {} await client.compile_suggestion(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compile_suggestion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py index 541161501ca5..463f1ad1ee40 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py @@ -1348,22 +1348,23 @@ async def test_list_session_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_session_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_session_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_session_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1950,22 +1951,23 @@ async def test_get_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2349,22 +2351,23 @@ async def test_create_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_session_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2762,22 +2765,23 @@ async def test_update_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3166,22 +3170,23 @@ async def test_delete_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py index c70a52e596a4..da5533de46b8 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py @@ -1245,22 +1245,23 @@ async def test_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detect_intent - ] = mock_object + ] = mock_rpc request = {} await client.detect_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1580,22 +1581,23 @@ async def test_streaming_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_detect_intent - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_detect_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py index dd32bec95757..69f463c32468 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py @@ -1235,22 +1235,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1802,22 +1803,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2177,22 +2179,23 @@ async def test_create_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_version - ] = mock_object + ] = mock_rpc request = {} await client.create_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2558,22 +2561,23 @@ async def test_update_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_version - ] = mock_object + ] = mock_rpc request = {} await client.update_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2927,22 +2931,23 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py index 3a3b09ebffa5..558c8aab67c5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py index 3a3b09ebffa5..558c8aab67c5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py index 9eec6f979966..e45cc4c621a1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) - ) + get_transport_class = CompletionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py index 2344e1741a8d..381944cf063a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[CompletionServiceTransport], Callable[..., CompletionServiceTransport], ] = ( - type(self).get_transport_class(transport) + CompletionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py index 07baf781183b..35b700126b56 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ControlServiceClient).get_transport_class, type(ControlServiceClient) - ) + get_transport_class = ControlServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py index 34a30522ac86..2a900c13463d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py @@ -695,7 +695,7 @@ def __init__( transport_init: Union[ Type[ControlServiceTransport], Callable[..., ControlServiceTransport] ] = ( - type(self).get_transport_class(transport) + ControlServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ControlServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py index dc6fb3c8db9c..75f3b1471e50 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -238,10 +237,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationalSearchServiceClient).get_transport_class, - type(ConversationalSearchServiceClient), - ) + get_transport_class = ConversationalSearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py index 89441a51b575..97170635640f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py @@ -840,7 +840,7 @@ def __init__( Type[ConversationalSearchServiceTransport], Callable[..., ConversationalSearchServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConversationalSearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., ConversationalSearchServiceTransport], transport diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py index 6c9adf6f8f24..1c4199b422f1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataStoreServiceClient).get_transport_class, type(DataStoreServiceClient) - ) + get_transport_class = DataStoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py index 2e690359d1aa..815dc13951f1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py @@ -751,7 +751,7 @@ def __init__( Type[DataStoreServiceTransport], Callable[..., DataStoreServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataStoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataStoreServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py index 2c3ee4561908..9cf6c715c029 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,9 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentServiceClient).get_transport_class, type(DocumentServiceClient) - ) + get_transport_class = DocumentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py index 196c944ced05..0de64afac5e6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py @@ -731,7 +731,7 @@ def __init__( transport_init: Union[ Type[DocumentServiceTransport], Callable[..., DocumentServiceTransport] ] = ( - type(self).get_transport_class(transport) + DocumentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py index 6e23c40d9306..403c07e67d41 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EngineServiceClient).get_transport_class, type(EngineServiceClient) - ) + get_transport_class = EngineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py index c79c59e77cfc..09c11f14c00b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py @@ -697,7 +697,7 @@ def __init__( transport_init: Union[ Type[EngineServiceTransport], Callable[..., EngineServiceTransport] ] = ( - type(self).get_transport_class(transport) + EngineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EngineServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/async_client.py index 10ef7a8089dc..247023e4ee11 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GroundedGenerationServiceClient).get_transport_class, - type(GroundedGenerationServiceClient), - ) + get_transport_class = GroundedGenerationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py index 75fc5793a300..a5723a495294 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[GroundedGenerationServiceTransport], Callable[..., GroundedGenerationServiceTransport], ] = ( - type(self).get_transport_class(transport) + GroundedGenerationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GroundedGenerationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/async_client.py index 30a15a92dc91..a892055580f8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProjectServiceClient).get_transport_class, type(ProjectServiceClient) - ) + get_transport_class = ProjectServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py index c964e97a6679..e2a70e94fabd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py @@ -659,7 +659,7 @@ def __init__( transport_init: Union[ Type[ProjectServiceTransport], Callable[..., ProjectServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProjectServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProjectServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/async_client.py index f67bf0b27bd2..5c2b7308987e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RankServiceClient).get_transport_class, type(RankServiceClient) - ) + get_transport_class = RankServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py index cb4c617a1e9d..d77102b56b75 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[RankServiceTransport], Callable[..., RankServiceTransport] ] = ( - type(self).get_transport_class(transport) + RankServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RankServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py index b1c04a5401c8..c9164b0a0070 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RecommendationServiceClient).get_transport_class, - type(RecommendationServiceClient), - ) + get_transport_class = RecommendationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py index eda0e663db13..e8656be3fa15 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py @@ -743,7 +743,7 @@ def __init__( Type[RecommendationServiceTransport], Callable[..., RecommendationServiceTransport], ] = ( - type(self).get_transport_class(transport) + RecommendationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RecommendationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py index 580691af188c..2a554d246aa6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) - ) + get_transport_class = SchemaServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py index 9b062d8e7d69..fe9290089ebc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[SchemaServiceTransport], Callable[..., SchemaServiceTransport] ] = ( - type(self).get_transport_class(transport) + SchemaServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SchemaServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py index e08d059cad9f..65561895086b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchServiceClient).get_transport_class, type(SearchServiceClient) - ) + get_transport_class = SearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py index 7771cf47f7f1..9a605affe19a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py @@ -787,7 +787,7 @@ def __init__( transport_init: Union[ Type[SearchServiceTransport], Callable[..., SearchServiceTransport] ] = ( - type(self).get_transport_class(transport) + SearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py index a39e03f65072..21098b464500 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,10 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SiteSearchEngineServiceClient).get_transport_class, - type(SiteSearchEngineServiceClient), - ) + get_transport_class = SiteSearchEngineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py index f1afaa1a2cd7..ade1d91f191f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[SiteSearchEngineServiceTransport], Callable[..., SiteSearchEngineServiceTransport], ] = ( - type(self).get_transport_class(transport) + SiteSearchEngineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SiteSearchEngineServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py index bf1b9d55fbd4..20dbd3c1b872 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient) - ) + get_transport_class = UserEventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py index 8fb0ebded5bb..e2936a08eb22 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py @@ -727,7 +727,7 @@ def __init__( Type[UserEventServiceTransport], Callable[..., UserEventServiceTransport], ] = ( - type(self).get_transport_class(transport) + UserEventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserEventServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py index 3a3b09ebffa5..558c8aab67c5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py index 35eff4a441d9..d27321c77e71 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AclConfigServiceClient).get_transport_class, type(AclConfigServiceClient) - ) + get_transport_class = AclConfigServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py index fb4d94a738b7..93ac988cd074 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py @@ -666,7 +666,7 @@ def __init__( Type[AclConfigServiceTransport], Callable[..., AclConfigServiceTransport], ] = ( - type(self).get_transport_class(transport) + AclConfigServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AclConfigServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py index 947f2b920617..8a0ff5883263 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ChunkServiceClient).get_transport_class, type(ChunkServiceClient) - ) + get_transport_class = ChunkServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py index 3d4431837805..cdc09b556394 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py @@ -694,7 +694,7 @@ def __init__( transport_init: Union[ Type[ChunkServiceTransport], Callable[..., ChunkServiceTransport] ] = ( - type(self).get_transport_class(transport) + ChunkServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ChunkServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py index 9587344070c3..fd04b0a031e8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) - ) + get_transport_class = CompletionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py index 3c919b767b53..e125449a512b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[CompletionServiceTransport], Callable[..., CompletionServiceTransport], ] = ( - type(self).get_transport_class(transport) + CompletionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/async_client.py index 586ba1a9e930..b8c61d0c86be 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ControlServiceClient).get_transport_class, type(ControlServiceClient) - ) + get_transport_class = ControlServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py index fdb4aef797d1..934f622fcbe4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py @@ -695,7 +695,7 @@ def __init__( transport_init: Union[ Type[ControlServiceTransport], Callable[..., ControlServiceTransport] ] = ( - type(self).get_transport_class(transport) + ControlServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ControlServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py index e9af29902d63..a26f9afdd801 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -238,10 +237,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationalSearchServiceClient).get_transport_class, - type(ConversationalSearchServiceClient), - ) + get_transport_class = ConversationalSearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py index d94a662ebf4b..39d913de62e3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py @@ -840,7 +840,7 @@ def __init__( Type[ConversationalSearchServiceTransport], Callable[..., ConversationalSearchServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConversationalSearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., ConversationalSearchServiceTransport], transport diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py index 4435ca4e57f7..38026fd02ae1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,9 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataStoreServiceClient).get_transport_class, type(DataStoreServiceClient) - ) + get_transport_class = DataStoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py index 38069719070b..c6e69e4645d9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py @@ -752,7 +752,7 @@ def __init__( Type[DataStoreServiceTransport], Callable[..., DataStoreServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataStoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataStoreServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py index 95106e56a333..ceaa82413af3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,9 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentServiceClient).get_transport_class, type(DocumentServiceClient) - ) + get_transport_class = DocumentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py index c6d4403c7534..4713addb0922 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py @@ -731,7 +731,7 @@ def __init__( transport_init: Union[ Type[DocumentServiceTransport], Callable[..., DocumentServiceTransport] ] = ( - type(self).get_transport_class(transport) + DocumentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py index f5f341e6108c..c409095ee8cc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EngineServiceClient).get_transport_class, type(EngineServiceClient) - ) + get_transport_class = EngineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py index 9e7b11072dcc..2193d4ccc3b7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py @@ -697,7 +697,7 @@ def __init__( transport_init: Union[ Type[EngineServiceTransport], Callable[..., EngineServiceTransport] ] = ( - type(self).get_transport_class(transport) + EngineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EngineServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py index 54ce32285cc1..0496cf7c7900 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EstimateBillingServiceClient).get_transport_class, - type(EstimateBillingServiceClient), - ) + get_transport_class = EstimateBillingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py index d29cf2c1c5ea..6d72dc3a6d57 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py @@ -666,7 +666,7 @@ def __init__( Type[EstimateBillingServiceTransport], Callable[..., EstimateBillingServiceTransport], ] = ( - type(self).get_transport_class(transport) + EstimateBillingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EstimateBillingServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/async_client.py index d8ec9fc9fdad..181f125afefa 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,9 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EvaluationServiceClient).get_transport_class, type(EvaluationServiceClient) - ) + get_transport_class = EvaluationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py index f5afb85dddbe..0c40cbc4a82a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py @@ -835,7 +835,7 @@ def __init__( Type[EvaluationServiceTransport], Callable[..., EvaluationServiceTransport], ] = ( - type(self).get_transport_class(transport) + EvaluationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EvaluationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py index 018ca05b4080..39e0c0525363 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GroundedGenerationServiceClient).get_transport_class, - type(GroundedGenerationServiceClient), - ) + get_transport_class = GroundedGenerationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py index 8466abfe190e..8a9a3d749dfe 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[GroundedGenerationServiceTransport], Callable[..., GroundedGenerationServiceTransport], ] = ( - type(self).get_transport_class(transport) + GroundedGenerationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GroundedGenerationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py index 77f55f80a802..2d1b84c5a275 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProjectServiceClient).get_transport_class, type(ProjectServiceClient) - ) + get_transport_class = ProjectServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py index d2f430fdf986..9e4e6e9d4468 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py @@ -662,7 +662,7 @@ def __init__( transport_init: Union[ Type[ProjectServiceTransport], Callable[..., ProjectServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProjectServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProjectServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py index c097d15f7515..371c045ff394 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RankServiceClient).get_transport_class, type(RankServiceClient) - ) + get_transport_class = RankServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py index 07a46c0e11e3..50a5a909fa07 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[RankServiceTransport], Callable[..., RankServiceTransport] ] = ( - type(self).get_transport_class(transport) + RankServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RankServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py index eff1a6b29402..a2f78ccf092c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RecommendationServiceClient).get_transport_class, - type(RecommendationServiceClient), - ) + get_transport_class = RecommendationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py index b72e350eab5a..9067daf9e4b0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py @@ -743,7 +743,7 @@ def __init__( Type[RecommendationServiceTransport], Callable[..., RecommendationServiceTransport], ] = ( - type(self).get_transport_class(transport) + RecommendationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RecommendationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/async_client.py index 0ed67de1cdf6..1d964a59fc10 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SampleQueryServiceClient).get_transport_class, - type(SampleQueryServiceClient), - ) + get_transport_class = SampleQueryServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py index 0fa8f2d96dbb..9dc41583d739 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py @@ -701,7 +701,7 @@ def __init__( Type[SampleQueryServiceTransport], Callable[..., SampleQueryServiceTransport], ] = ( - type(self).get_transport_class(transport) + SampleQueryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SampleQueryServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/async_client.py index 61bce7d2a3a1..52badd9cc388 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,10 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SampleQuerySetServiceClient).get_transport_class, - type(SampleQuerySetServiceClient), - ) + get_transport_class = SampleQuerySetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py index 0f8b18e6b359..8f2372b14e5c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py @@ -697,7 +697,7 @@ def __init__( Type[SampleQuerySetServiceTransport], Callable[..., SampleQuerySetServiceTransport], ] = ( - type(self).get_transport_class(transport) + SampleQuerySetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SampleQuerySetServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py index 43f09259acf8..e729fb427a4f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) - ) + get_transport_class = SchemaServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py index e1432e17ff3e..d8da1772de5e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[SchemaServiceTransport], Callable[..., SchemaServiceTransport] ] = ( - type(self).get_transport_class(transport) + SchemaServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SchemaServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py index df7822be74f9..2f628a35c3ba 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchServiceClient).get_transport_class, type(SearchServiceClient) - ) + get_transport_class = SearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py index 7e589620b6a8..e974a741cc46 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py @@ -787,7 +787,7 @@ def __init__( transport_init: Union[ Type[SearchServiceTransport], Callable[..., SearchServiceTransport] ] = ( - type(self).get_transport_class(transport) + SearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py index be32f606d365..2f47b005ea07 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,10 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchTuningServiceClient).get_transport_class, - type(SearchTuningServiceClient), - ) + get_transport_class = SearchTuningServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py index 091d0a1bf534..7be88c26e15d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py @@ -696,7 +696,7 @@ def __init__( Type[SearchTuningServiceTransport], Callable[..., SearchTuningServiceTransport], ] = ( - type(self).get_transport_class(transport) + SearchTuningServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchTuningServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py index 8a9eaa2c6319..2c307da4af1d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServingConfigServiceClient).get_transport_class, - type(ServingConfigServiceClient), - ) + get_transport_class = ServingConfigServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py index 7abce53628a3..8a0e89232254 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py @@ -679,7 +679,7 @@ def __init__( Type[ServingConfigServiceTransport], Callable[..., ServingConfigServiceTransport], ] = ( - type(self).get_transport_class(transport) + ServingConfigServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServingConfigServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py index 9e9a6a534e7f..324a762e9a2b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,10 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SiteSearchEngineServiceClient).get_transport_class, - type(SiteSearchEngineServiceClient), - ) + get_transport_class = SiteSearchEngineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py index 8afe9a3720bf..5c8a5f85b002 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py @@ -705,7 +705,7 @@ def __init__( Type[SiteSearchEngineServiceTransport], Callable[..., SiteSearchEngineServiceTransport], ] = ( - type(self).get_transport_class(transport) + SiteSearchEngineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SiteSearchEngineServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py index 7caf68634967..da7b0dbb97dd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,9 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient) - ) + get_transport_class = UserEventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py index c996847e40b9..ab4c7e3054dd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py @@ -728,7 +728,7 @@ def __init__( Type[UserEventServiceTransport], Callable[..., UserEventServiceTransport], ] = ( - type(self).get_transport_class(transport) + UserEventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserEventServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py index 3a3b09ebffa5..558c8aab67c5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py index ea3176fe1214..e036921d6b2f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) - ) + get_transport_class = CompletionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py index bf2551c1e626..f028d887a4de 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[CompletionServiceTransport], Callable[..., CompletionServiceTransport], ] = ( - type(self).get_transport_class(transport) + CompletionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompletionServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/async_client.py index 0c7979101292..4c75df9019e3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ControlServiceClient).get_transport_class, type(ControlServiceClient) - ) + get_transport_class = ControlServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py index e921e6d61884..4638275c9f83 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py @@ -695,7 +695,7 @@ def __init__( transport_init: Union[ Type[ControlServiceTransport], Callable[..., ControlServiceTransport] ] = ( - type(self).get_transport_class(transport) + ControlServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ControlServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py index 06bf97a32180..87d43622302f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -238,10 +237,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationalSearchServiceClient).get_transport_class, - type(ConversationalSearchServiceClient), - ) + get_transport_class = ConversationalSearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py index 051fce55a7f1..038c9360b10e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py @@ -840,7 +840,7 @@ def __init__( Type[ConversationalSearchServiceTransport], Callable[..., ConversationalSearchServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConversationalSearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., ConversationalSearchServiceTransport], transport diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py index e16aae173721..f18be4cc0e43 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataStoreServiceClient).get_transport_class, type(DataStoreServiceClient) - ) + get_transport_class = DataStoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py index 26fe2ba7b9b0..5087e8698d48 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py @@ -751,7 +751,7 @@ def __init__( Type[DataStoreServiceTransport], Callable[..., DataStoreServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataStoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataStoreServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py index 40dc494afcb1..3cd99963bdee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,9 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentServiceClient).get_transport_class, type(DocumentServiceClient) - ) + get_transport_class = DocumentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py index 515dab24b3b5..4dbf8a833298 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py @@ -731,7 +731,7 @@ def __init__( transport_init: Union[ Type[DocumentServiceTransport], Callable[..., DocumentServiceTransport] ] = ( - type(self).get_transport_class(transport) + DocumentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py index 402b05514620..9d03debdae1e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EngineServiceClient).get_transport_class, type(EngineServiceClient) - ) + get_transport_class = EngineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py index a094d0833d48..9715af963279 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py @@ -697,7 +697,7 @@ def __init__( transport_init: Union[ Type[EngineServiceTransport], Callable[..., EngineServiceTransport] ] = ( - type(self).get_transport_class(transport) + EngineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EngineServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/async_client.py index 9c852b36ad1e..d4c232d98f2d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,9 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EvaluationServiceClient).get_transport_class, type(EvaluationServiceClient) - ) + get_transport_class = EvaluationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py index 5c777e35a9a2..7f1c8f57232b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py @@ -835,7 +835,7 @@ def __init__( Type[EvaluationServiceTransport], Callable[..., EvaluationServiceTransport], ] = ( - type(self).get_transport_class(transport) + EvaluationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EvaluationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py index eae9a5cff1b9..8e091db814aa 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GroundedGenerationServiceClient).get_transport_class, - type(GroundedGenerationServiceClient), - ) + get_transport_class = GroundedGenerationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py index 9c449bd30209..938f8ec21b9c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[GroundedGenerationServiceTransport], Callable[..., GroundedGenerationServiceTransport], ] = ( - type(self).get_transport_class(transport) + GroundedGenerationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GroundedGenerationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/async_client.py index df503d3ae8ce..6734e3b5564c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProjectServiceClient).get_transport_class, type(ProjectServiceClient) - ) + get_transport_class = ProjectServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py index 79d0321f3010..7a7babb35445 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py @@ -659,7 +659,7 @@ def __init__( transport_init: Union[ Type[ProjectServiceTransport], Callable[..., ProjectServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProjectServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProjectServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py index ffc1bc7f3e00..6cdd54547619 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RankServiceClient).get_transport_class, type(RankServiceClient) - ) + get_transport_class = RankServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py index ac877d0c2281..f9650bd5b9ae 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[RankServiceTransport], Callable[..., RankServiceTransport] ] = ( - type(self).get_transport_class(transport) + RankServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RankServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py index 387914f6741e..4e31f955cbe9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RecommendationServiceClient).get_transport_class, - type(RecommendationServiceClient), - ) + get_transport_class = RecommendationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py index b7fe7b254248..ddfb21bcfbfb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py @@ -743,7 +743,7 @@ def __init__( Type[RecommendationServiceTransport], Callable[..., RecommendationServiceTransport], ] = ( - type(self).get_transport_class(transport) + RecommendationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RecommendationServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/async_client.py index 7abc4c66e5a8..c65aa3707232 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SampleQueryServiceClient).get_transport_class, - type(SampleQueryServiceClient), - ) + get_transport_class = SampleQueryServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py index 648c67e1036a..3831e84d1038 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py @@ -701,7 +701,7 @@ def __init__( Type[SampleQueryServiceTransport], Callable[..., SampleQueryServiceTransport], ] = ( - type(self).get_transport_class(transport) + SampleQueryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SampleQueryServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/async_client.py index 1abab1e8704d..7f6b24afa3d8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,10 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SampleQuerySetServiceClient).get_transport_class, - type(SampleQuerySetServiceClient), - ) + get_transport_class = SampleQuerySetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py index b2859df45117..266cbd35a543 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py @@ -695,7 +695,7 @@ def __init__( Type[SampleQuerySetServiceTransport], Callable[..., SampleQuerySetServiceTransport], ] = ( - type(self).get_transport_class(transport) + SampleQuerySetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SampleQuerySetServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py index 97a82e37131c..561f2aaa23ac 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) - ) + get_transport_class = SchemaServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py index 079b5aa47d7e..456eb81a6b1d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[SchemaServiceTransport], Callable[..., SchemaServiceTransport] ] = ( - type(self).get_transport_class(transport) + SchemaServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SchemaServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py index 4f93965ec6aa..ff1fb19fe028 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchServiceClient).get_transport_class, type(SearchServiceClient) - ) + get_transport_class = SearchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py index 3ce6a89af3b4..7397c054afc5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py @@ -787,7 +787,7 @@ def __init__( transport_init: Union[ Type[SearchServiceTransport], Callable[..., SearchServiceTransport] ] = ( - type(self).get_transport_class(transport) + SearchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py index ea27f1e24c7c..ee572e4b4ba1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,10 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SearchTuningServiceClient).get_transport_class, - type(SearchTuningServiceClient), - ) + get_transport_class = SearchTuningServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py index 116e20bf5440..9b71699c8044 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py @@ -696,7 +696,7 @@ def __init__( Type[SearchTuningServiceTransport], Callable[..., SearchTuningServiceTransport], ] = ( - type(self).get_transport_class(transport) + SearchTuningServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SearchTuningServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py index fbb675a7c116..3ee6dd0f4665 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServingConfigServiceClient).get_transport_class, - type(ServingConfigServiceClient), - ) + get_transport_class = ServingConfigServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py index 98ae622d1ff2..d5b3af027154 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py @@ -679,7 +679,7 @@ def __init__( Type[ServingConfigServiceTransport], Callable[..., ServingConfigServiceTransport], ] = ( - type(self).get_transport_class(transport) + ServingConfigServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServingConfigServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py index 3b070faa46c6..6a747b266d46 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,10 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SiteSearchEngineServiceClient).get_transport_class, - type(SiteSearchEngineServiceClient), - ) + get_transport_class = SiteSearchEngineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py index 43315168963b..4e6afa8960ad 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py @@ -705,7 +705,7 @@ def __init__( Type[SiteSearchEngineServiceTransport], Callable[..., SiteSearchEngineServiceTransport], ] = ( - type(self).get_transport_class(transport) + SiteSearchEngineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SiteSearchEngineServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py index 5fc7db3653cd..67b4d1b70f7d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,9 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient) - ) + get_transport_class = UserEventServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py index 5b309117da80..01ff702880c7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py @@ -728,7 +728,7 @@ def __init__( Type[UserEventServiceTransport], Callable[..., UserEventServiceTransport], ] = ( - type(self).get_transport_class(transport) + UserEventServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserEventServiceTransport], transport) ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index cfa0d06bdd16..69944fc7ce8a 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.12.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index 78b990672b87..2884e903e0cf 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.12.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index 91cdb4346919..cff39c47dc30 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.12.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py 
index 31e2ed1f675c..ceff14a5ce65 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py @@ -1341,22 +1341,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1579,8 +1580,9 @@ def test_import_suggestion_deny_list_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_suggestion_deny_list_entries(request) @@ -1636,26 +1638,28 @@ async def test_import_suggestion_deny_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_suggestion_deny_list_entries - ] = mock_object + ] = mock_rpc request = {} await client.import_suggestion_deny_list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_suggestion_deny_list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1881,8 +1885,9 @@ def test_purge_suggestion_deny_list_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_suggestion_deny_list_entries(request) @@ -1938,26 +1943,28 @@ async def test_purge_suggestion_deny_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_suggestion_deny_list_entries - ] = mock_object + ] = mock_rpc request = {} await client.purge_suggestion_deny_list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_suggestion_deny_list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2183,8 +2190,9 @@ def test_import_completion_suggestions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_completion_suggestions(request) @@ -2240,26 +2248,28 @@ async def test_import_completion_suggestions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_completion_suggestions - ] = mock_object + ] = mock_rpc request = {} await client.import_completion_suggestions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_completion_suggestions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2485,8 +2495,9 @@ def test_purge_completion_suggestions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_completion_suggestions(request) @@ -2542,26 +2553,28 @@ async def test_purge_completion_suggestions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_completion_suggestions - ] = mock_object + ] = mock_rpc request = {} await client.purge_completion_suggestions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_completion_suggestions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py index 66ef0848b4ad..3ec7d6007c23 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py @@ -1304,22 +1304,23 @@ async def test_create_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_control - ] = mock_object + ] = mock_rpc request = {} await client.create_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1699,22 +1700,23 @@ async def test_delete_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_control - ] = mock_object + ] = mock_rpc request = {} await client.delete_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2064,22 +2066,23 @@ async def test_update_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_control - ] = mock_object + ] = mock_rpc request = {} await client.update_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2470,22 +2473,23 @@ async def test_get_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_control - ] = mock_object + ] = mock_rpc request = {} await client.get_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2844,22 +2848,23 @@ async def test_list_controls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_controls - ] = mock_object + ] = mock_rpc request = {} await client.list_controls(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_controls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py index b14a16ccbfc2..67056fdabf2f 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py @@ -1395,22 +1395,23 @@ async def test_converse_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.converse_conversation - ] = mock_object + ] = mock_rpc request = {} await client.converse_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.converse_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1797,22 +1798,23 @@ async def test_create_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2191,22 +2193,23 @@ async def test_delete_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2571,22 +2574,23 @@ async def test_update_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversation - ] = mock_object + ] = mock_rpc request = {} await client.update_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2968,22 +2972,23 @@ async def test_get_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3355,22 +3360,23 @@ async def test_list_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3936,22 +3942,23 @@ async def test_answer_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.answer_query - ] = mock_object + ] = mock_rpc request = {} await client.answer_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.answer_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4235,22 +4242,23 @@ async def test_get_answer_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_answer - ] = mock_object + ] = mock_rpc request = {} await client.get_answer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_answer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4614,22 +4622,23 @@ async def test_create_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session - ] = mock_object + ] = mock_rpc request = {} await client.create_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4982,22 +4991,23 @@ async def test_delete_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session - ] = mock_object + ] = mock_rpc request = {} await client.delete_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5340,22 +5350,23 @@ async def test_update_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session - ] = mock_object + ] = mock_rpc request = {} await client.update_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5721,22 +5732,23 @@ async def test_get_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session - ] = mock_object + ] = mock_rpc request = {} await client.get_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6092,22 +6104,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py index df1d9db22607..9459a96eceda 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py @@ -1287,8 +1287,9 @@ def test_create_data_store_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_store(request) @@ -1344,26 +1345,28 @@ async def test_create_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_store - ] = mock_object + ] = mock_rpc request = {} await client.create_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1755,22 +1758,23 @@ async def test_get_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_store - ] = mock_object + ] = mock_rpc request = {} await client.get_data_store(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2135,22 +2139,23 @@ async def test_list_data_stores_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_stores - ] = mock_object + ] = mock_rpc request = {} await client.list_data_stores(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_stores(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2646,8 +2651,9 @@ def test_delete_data_store_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_store(request) @@ -2703,26 +2709,28 @@ async def test_delete_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_store - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3100,22 +3108,23 @@ async def test_update_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_store - ] = mock_object + ] = mock_rpc request = {} await client.update_data_store(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py index 5485a80a6ad6..207850afb022 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py @@ -1336,22 +1336,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1704,22 +1705,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2274,22 +2276,23 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2718,22 +2721,23 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3139,22 +3143,23 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3442,8 +3447,9 @@ def test_import_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_documents(request) @@ -3497,26 +3503,28 @@ async def test_import_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_documents - ] = mock_object + ] = mock_rpc request = {} await client.import_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3726,8 +3734,9 @@ def test_purge_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_documents(request) @@ -3781,26 +3790,28 @@ async def test_purge_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_documents - ] = mock_object + ] = mock_rpc request = {} await client.purge_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py index a0eef09e9187..e6dd2a1b09c0 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py @@ -1240,8 +1240,9 @@ def test_create_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_engine(request) @@ -1295,26 +1296,28 @@ async def test_create_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_engine - ] = mock_object + ] = mock_rpc request = {} await client.create_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1660,8 +1663,9 @@ def test_delete_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_engine(request) @@ -1715,26 +1719,28 @@ async def test_delete_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_engine - ] = mock_object + ] = mock_rpc request = {} await client.delete_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2088,22 +2094,23 @@ async def test_update_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_engine - ] = mock_object + ] = mock_rpc request = {} await client.update_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2512,22 +2519,23 @@ async def test_get_engine_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_engine - ] = mock_object + ] = mock_rpc request = {} await client.get_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2884,22 +2892,23 @@ async def test_list_engines_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_engines - ] = mock_object + ] = mock_rpc request = {} await client.list_engines(request) # Establish that the underlying gRPC stub method was called. 
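Editor's note: the operation-method hunks in this patch (create_engine, delete_engine, and the others) all replace the old two-line comment with one naming the cached client._transport.operations_client. The sketch below is an editorial illustration of the caching behavior those assertions check, not the generated code; FakeLroClient and delete_thing are hypothetical names, and the lazy build-and-cache step is a simplified assumption standing in for what wrapper_fn does in the real clients.

# Sketch of the long-running-operation assertion: wrapper_fn builds the
# operations client on the first RPC, and later calls must reuse it.
from unittest import mock


class FakeLroClient:
    """Hypothetical stand-in for a client with a lazily built operations client."""

    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    def delete_thing(self, request):
        if self._operations_client is None:
            # Built once on the first call, then cached.
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(return_value=mock.Mock())
client = FakeLroClient(wrapper_fn)

client.delete_thing({})              # first call builds the operations client
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
client.delete_thing({})              # second call reuses the cached instance
assert wrapper_fn.call_count == 0    # no new wrapper was created for this call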
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_engines(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py index 0263067129c3..0f60e25611ae 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py @@ -1368,22 +1368,23 @@ async def test_check_grounding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_grounding - ] = mock_object + ] = mock_rpc request = {} await client.check_grounding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_grounding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py index c722c847391e..2daef237a45c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py @@ -1245,8 +1245,9 @@ def test_provision_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.provision_project(request) @@ -1302,26 +1303,28 @@ async def test_provision_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_project - ] = mock_object + ] = mock_rpc request = {} await client.provision_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.provision_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py index bce3536fac33..7df37b54070e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py @@ -1241,22 +1241,23 @@ async def test_rank_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio" ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rank - ] = mock_object + ] = mock_rpc request = {} await client.rank(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rank(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py index bde584d1ec2a..d84c20ba77fd 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py @@ -1365,22 +1365,23 @@ async def test_recommend_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recommend - ] = mock_object + ] = mock_rpc request = {} await client.recommend(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.recommend(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py index cc5ed89245b7..a6b459547db9 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py @@ -1291,22 +1291,23 @@ async def test_get_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1653,22 +1654,23 @@ async def test_list_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_schemas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2157,8 +2159,9 @@ def test_create_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_schema(request) @@ -2212,26 +2215,28 @@ async def test_create_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2585,8 +2590,9 @@ def test_update_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_schema(request) @@ -2640,26 +2646,28 @@ async def test_update_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2867,8 +2875,9 @@ def test_delete_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_schema(request) @@ -2922,26 +2931,28 @@ async def test_delete_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py index 672a06ca27c1..827427bbf4d3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py @@ -1308,22 +1308,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py index 642b37a3bd03..244b30d00ba3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py @@ -1382,22 +1382,23 @@ async def test_get_site_search_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_site_search_engine - ] = mock_object + ] = mock_rpc request = {} await client.get_site_search_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_site_search_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1711,8 +1712,9 @@ def test_create_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_target_site(request) @@ -1768,26 +1770,28 @@ async def test_create_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_target_site - ] = mock_object + ] = mock_rpc request = {} await client.create_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2109,8 +2113,9 @@ def test_batch_create_target_sites_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_target_sites(request) @@ -2166,26 +2171,28 @@ async def test_batch_create_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_target_sites(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2476,22 +2483,23 @@ async def test_get_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_target_site - ] = mock_object + ] = mock_rpc request = {} await client.get_target_site(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2805,8 +2813,9 @@ def test_update_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_target_site(request) @@ -2862,26 +2871,28 @@ async def test_update_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_target_site - ] = mock_object + ] = mock_rpc request = {} await client.update_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3192,8 +3203,9 @@ def test_delete_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_target_site(request) @@ -3249,26 +3261,28 @@ async def test_delete_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_target_site - ] = mock_object + ] = mock_rpc request = {} await client.delete_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3640,22 +3654,23 @@ async def test_list_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.list_target_sites(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4174,8 +4189,9 @@ def test_enable_advanced_site_search_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_advanced_site_search(request) @@ -4231,26 +4247,28 @@ async def test_enable_advanced_site_search_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_advanced_site_search - ] = mock_object + ] = mock_rpc request = {} await client.enable_advanced_site_search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_advanced_site_search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4476,8 +4494,9 @@ def test_disable_advanced_site_search_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_advanced_site_search(request) @@ -4533,26 +4552,28 @@ async def test_disable_advanced_site_search_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_advanced_site_search - ] = mock_object + ] = mock_rpc request = {} await client.disable_advanced_site_search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_advanced_site_search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4767,8 +4788,9 @@ def test_recrawl_uris_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.recrawl_uris(request) @@ -4822,26 +4844,28 @@ async def test_recrawl_uris_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recrawl_uris - ] = mock_object + ] = mock_rpc request = {} await client.recrawl_uris(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.recrawl_uris(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5061,8 +5085,9 @@ def test_batch_verify_target_sites_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_verify_target_sites(request) @@ -5118,26 +5143,28 @@ async def test_batch_verify_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_verify_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.batch_verify_target_sites(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_verify_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5434,22 +5461,23 @@ async def test_fetch_domain_verification_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_domain_verification_status - ] = mock_object + ] = mock_rpc request = {} await client.fetch_domain_verification_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_domain_verification_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py index 2653666166f5..08b5d51e76e2 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py @@ -1360,22 +1360,23 @@ async def test_write_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_user_event - ] = mock_object + ] = mock_rpc request = {} await client.write_user_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1680,22 +1681,23 @@ async def test_collect_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.collect_user_event - ] = mock_object + ] = mock_rpc request = {} await client.collect_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.collect_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1925,8 +1927,9 @@ def test_import_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_user_events(request) @@ -1982,26 +1985,28 @@ async def test_import_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_user_events - ] = mock_object + ] = mock_rpc request = {} await client.import_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py index 99af7c342918..d9ae170f9f90 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py @@ -1322,22 +1322,23 @@ async def test_update_acl_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_acl_config - ] = mock_object + ] = mock_rpc request = {} await client.update_acl_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_acl_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1611,22 +1612,23 @@ async def test_get_acl_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_acl_config - ] = mock_object + ] = mock_rpc request = {} await client.get_acl_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_acl_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py index a292164adda9..2bbe861f3a03 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py @@ -1268,22 +1268,23 @@ async def test_get_chunk_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_chunk - ] = mock_object + ] = mock_rpc request = {} await client.get_chunk(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1636,22 +1637,23 @@ async def test_list_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_chunks - ] = mock_object + ] = mock_rpc request = {} await client.list_chunks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py index 9834caeffcaa..b444d0eafee2 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py @@ -1341,22 +1341,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1579,8 +1580,9 @@ def test_import_suggestion_deny_list_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_suggestion_deny_list_entries(request) @@ -1636,26 +1638,28 @@ async def test_import_suggestion_deny_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_suggestion_deny_list_entries - ] = mock_object + ] = mock_rpc request = {} await client.import_suggestion_deny_list_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_suggestion_deny_list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1881,8 +1885,9 @@ def test_purge_suggestion_deny_list_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_suggestion_deny_list_entries(request) @@ -1938,26 +1943,28 @@ async def test_purge_suggestion_deny_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_suggestion_deny_list_entries - ] = mock_object + ] = mock_rpc request = {} await client.purge_suggestion_deny_list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_suggestion_deny_list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2183,8 +2190,9 @@ def test_import_completion_suggestions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_completion_suggestions(request) @@ -2240,26 +2248,28 @@ async def test_import_completion_suggestions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_completion_suggestions - ] = mock_object + ] = mock_rpc request = {} await client.import_completion_suggestions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_completion_suggestions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2485,8 +2495,9 @@ def test_purge_completion_suggestions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_completion_suggestions(request) @@ -2542,26 +2553,28 @@ async def test_purge_completion_suggestions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_completion_suggestions - ] = mock_object + ] = mock_rpc request = {} await client.purge_completion_suggestions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_completion_suggestions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py index 1e81121478e6..bf1c6e867a2a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py @@ -1304,22 +1304,23 @@ async def test_create_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_control - ] = mock_object + ] = mock_rpc request = {} await client.create_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1699,22 +1700,23 @@ async def test_delete_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_control - ] = mock_object + ] = mock_rpc request = {} await client.delete_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2064,22 +2066,23 @@ async def test_update_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_control - ] = mock_object + ] = mock_rpc request = {} await client.update_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2470,22 +2473,23 @@ async def test_get_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_control - ] = mock_object + ] = mock_rpc request = {} await client.get_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2844,22 +2848,23 @@ async def test_list_controls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_controls - ] = mock_object + ] = mock_rpc request = {} await client.list_controls(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_controls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py index be4c07583cab..393aef4d19c8 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py @@ -1400,22 +1400,23 @@ async def test_converse_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.converse_conversation - ] = mock_object + ] = mock_rpc request = {} await client.converse_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.converse_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1805,22 +1806,23 @@ async def test_create_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2199,22 +2201,23 @@ async def test_delete_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2579,22 +2582,23 @@ async def test_update_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversation - ] = mock_object + ] = mock_rpc request = {} await client.update_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2976,22 +2980,23 @@ async def test_get_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3363,22 +3368,23 @@ async def test_list_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3944,22 +3950,23 @@ async def test_answer_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.answer_query - ] = mock_object + ] = mock_rpc request = {} await client.answer_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.answer_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4243,22 +4250,23 @@ async def test_get_answer_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_answer - ] = mock_object + ] = mock_rpc request = {} await client.get_answer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_answer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4622,22 +4630,23 @@ async def test_create_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session - ] = mock_object + ] = mock_rpc request = {} await client.create_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4990,22 +4999,23 @@ async def test_delete_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session - ] = mock_object + ] = mock_rpc request = {} await client.delete_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5348,22 +5358,23 @@ async def test_update_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session - ] = mock_object + ] = mock_rpc request = {} await client.update_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5729,22 +5740,23 @@ async def test_get_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session - ] = mock_object + ] = mock_rpc request = {} await client.get_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6100,22 +6112,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py index 98578bb499d0..961c537481ba 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py @@ -1288,8 +1288,9 @@ def test_create_data_store_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_store(request) @@ -1345,26 +1346,28 @@ async def test_create_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_store - ] = mock_object + ] = mock_rpc request = {} await client.create_data_store(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1759,22 +1762,23 @@ async def test_get_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_store - ] = mock_object + ] = mock_rpc request = {} await client.get_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2141,22 +2145,23 @@ async def test_list_data_stores_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_stores - ] = mock_object + ] = mock_rpc request = {} await client.list_data_stores(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_stores(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2652,8 +2657,9 @@ def test_delete_data_store_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_store(request) @@ -2709,26 +2715,28 @@ async def test_delete_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_store - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3109,22 +3117,23 @@ async def test_update_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_store - ] = mock_object + ] = mock_rpc request = {} await client.update_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3519,22 +3528,23 @@ async def test_get_document_processing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document_processing_config - ] = mock_object + ] = mock_rpc request = {} await client.get_document_processing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document_processing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3903,22 +3913,23 @@ async def test_update_document_processing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document_processing_config - ] = mock_object + ] = mock_rpc request = {} await client.update_document_processing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document_processing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py index 4e2a81932b43..8adf368a1a3b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py @@ -1337,22 +1337,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1705,22 +1706,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2275,22 +2277,23 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2719,22 +2722,23 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3140,22 +3144,23 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3443,8 +3448,9 @@ def test_import_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_documents(request) @@ -3498,26 +3504,28 @@ async def test_import_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_documents - ] = mock_object + ] = mock_rpc request = {} await client.import_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3727,8 +3735,9 @@ def test_purge_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_documents(request) @@ -3782,26 +3791,28 @@ async def test_purge_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_documents - ] = mock_object + ] = mock_rpc request = {} await client.purge_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4079,22 +4090,23 @@ async def test_get_processed_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processed_document - ] = mock_object + ] = mock_rpc request = {} await client.get_processed_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processed_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py index 7849d17b7123..48d2762d7976 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py @@ -1240,8 +1240,9 @@ def test_create_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_engine(request) @@ -1295,26 +1296,28 @@ async def test_create_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_engine - ] = mock_object + ] = mock_rpc request = {} await client.create_engine(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1624,8 +1627,9 @@ def test_delete_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_engine(request) @@ -1679,26 +1683,28 @@ async def test_delete_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_engine - ] = mock_object + ] = mock_rpc request = {} await client.delete_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2052,22 +2058,23 @@ async def test_update_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_engine - ] = mock_object + ] = mock_rpc request = {} await client.update_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2440,22 +2447,23 @@ async def test_get_engine_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_engine - ] = mock_object + ] = mock_rpc request = {} await client.get_engine(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2812,22 +2820,23 @@ async def test_list_engines_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_engines - ] = mock_object + ] = mock_rpc request = {} await client.list_engines(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_engines(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3382,22 +3391,23 @@ async def test_pause_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_engine - ] = mock_object + ] = mock_rpc request = {} await client.pause_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3762,22 +3772,23 @@ async def test_resume_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_engine - ] = mock_object + ] = mock_rpc request = {} await client.resume_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4074,8 +4085,9 @@ def test_tune_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.tune_engine(request) @@ -4129,26 +4141,28 @@ async def test_tune_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tune_engine - ] = mock_object + ] = mock_rpc request = {} await client.tune_engine(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.tune_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py index 9d819432015e..d50d1e19c39b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py @@ -1321,8 +1321,9 @@ def test_estimate_data_size_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.estimate_data_size(request) @@ -1378,26 +1379,28 @@ async def test_estimate_data_size_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.estimate_data_size - ] = mock_object + ] = mock_rpc request = {} await client.estimate_data_size(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.estimate_data_size(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py index d05b60250570..c34ef74bcbfc 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py @@ -1343,22 +1343,23 @@ async def test_get_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1714,22 +1715,23 @@ async def test_list_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2225,8 +2227,9 @@ def test_create_evaluation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_evaluation(request) @@ -2282,26 +2285,28 @@ async def test_create_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.create_evaluation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2683,22 +2688,23 @@ async def test_list_evaluation_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluation_results - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluation_results(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluation_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py index d34a1b8fb07a..9d48b6f4a81e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py @@ -1371,22 +1371,23 @@ async def test_check_grounding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_grounding - ] = mock_object + ] = mock_rpc request = {} await client.check_grounding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_grounding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py index 07485195738d..e5d70a45d6b3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py @@ -1294,22 +1294,23 @@ async def test_get_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project - ] = mock_object + ] = mock_rpc request = {} await client.get_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1608,8 +1609,9 @@ def test_provision_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.provision_project(request) @@ -1665,26 +1667,28 @@ async def test_provision_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_project - ] = mock_object + ] = mock_rpc request = {} await client.provision_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.provision_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2058,22 +2062,23 @@ async def test_report_consent_change_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_consent_change - ] = mock_object + ] = mock_rpc request = {} await client.report_consent_change(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.report_consent_change(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py index 3a232982062a..7ddff64d1544 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py @@ -1241,22 +1241,23 @@ async def test_rank_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio" ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rank - ] = mock_object + ] = mock_rpc request = {} await client.rank(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rank(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py index dd441fe11082..b664e5ee8a01 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py @@ -1365,22 +1365,23 @@ async def test_recommend_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recommend - ] = mock_object + ] = mock_rpc request = {} await client.recommend(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.recommend(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py index 1b043d898983..b96ba429fe28 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py @@ -1342,22 +1342,23 @@ async def test_get_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.get_sample_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1721,22 +1722,23 @@ async def test_list_sample_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sample_queries - ] = mock_object + ] = mock_rpc request = {} await client.list_sample_queries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sample_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2312,22 +2314,23 @@ async def test_create_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.create_sample_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2727,22 +2730,23 @@ async def test_update_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.update_sample_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3129,22 +3133,23 @@ async def test_delete_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.delete_sample_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3450,8 +3455,9 @@ def test_import_sample_queries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_sample_queries(request) @@ -3507,26 +3513,28 @@ async def test_import_sample_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_sample_queries - ] = mock_object + ] = mock_rpc request = {} await client.import_sample_queries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_sample_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py index 724fa9f73fdf..8db198dbeaff 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py @@ -1379,22 +1379,23 @@ async def test_get_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.get_sample_query_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1773,22 +1774,23 @@ async def test_list_sample_query_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sample_query_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_sample_query_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sample_query_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2371,22 +2373,23 @@ async def test_create_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.create_sample_query_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2785,22 +2788,23 @@ async def test_update_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.update_sample_query_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3180,22 +3184,23 @@ async def test_delete_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_sample_query_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py index 7bab1c8ab2e8..bda3e6458e96 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py @@ -1291,22 +1291,23 @@ async def test_get_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1653,22 +1654,23 @@ async def test_list_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_schemas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2157,8 +2159,9 @@ def test_create_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_schema(request) @@ -2212,26 +2215,28 @@ async def test_create_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2585,8 +2590,9 @@ def test_update_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_schema(request) @@ -2640,26 +2646,28 @@ async def test_update_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2867,8 +2875,9 @@ def test_delete_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_schema(request) @@ -2922,26 +2931,28 @@ async def test_delete_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py index 796d624b5960..31e26c37e303 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py @@ -1315,22 +1315,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py index 02b0318d34c4..0ce42e72738b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py @@ -1321,8 +1321,9 @@ def test_train_custom_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_custom_model(request) @@ -1378,26 +1379,28 @@ async def test_train_custom_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_custom_model - ] = mock_object + ] = mock_rpc request = {} await client.train_custom_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_custom_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1675,22 +1678,23 @@ async def test_list_custom_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_models - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py index f5ce500ab80a..7d15aae1b181 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py @@ -1406,22 +1406,23 @@ async def test_update_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.update_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1892,22 +1893,23 @@ async def test_get_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.get_serving_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2307,22 +2309,23 @@ async def test_list_serving_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_serving_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_serving_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_serving_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py index ab98c6a89e04..7f171f1ca062 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py @@ -1383,22 +1383,23 @@ async def test_get_site_search_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_site_search_engine - ] = mock_object + ] = mock_rpc request = {} await client.get_site_search_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_site_search_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1712,8 +1713,9 @@ def test_create_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_target_site(request) @@ -1769,26 +1771,28 @@ async def test_create_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_target_site - ] = mock_object + ] = mock_rpc request = {} await client.create_target_site(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2110,8 +2114,9 @@ def test_batch_create_target_sites_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_target_sites(request) @@ -2167,26 +2172,28 @@ async def test_batch_create_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_target_sites(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2477,22 +2484,23 @@ async def test_get_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_target_site - ] = mock_object + ] = mock_rpc request = {} await client.get_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2806,8 +2814,9 @@ def test_update_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_target_site(request) @@ -2863,26 +2872,28 @@ async def test_update_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_target_site - ] = mock_object + ] = mock_rpc request = {} await client.update_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3193,8 +3204,9 @@ def test_delete_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_target_site(request) @@ -3250,26 +3262,28 @@ async def test_delete_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_target_site - ] = mock_object + ] = mock_rpc request = {} await client.delete_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3641,22 +3655,23 @@ async def test_list_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.list_target_sites(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4175,8 +4190,9 @@ def test_enable_advanced_site_search_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_advanced_site_search(request) @@ -4232,26 +4248,28 @@ async def test_enable_advanced_site_search_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_advanced_site_search - ] = mock_object + ] = mock_rpc request = {} await client.enable_advanced_site_search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_advanced_site_search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4477,8 +4495,9 @@ def test_disable_advanced_site_search_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_advanced_site_search(request) @@ -4534,26 +4553,28 @@ async def test_disable_advanced_site_search_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_advanced_site_search - ] = mock_object + ] = mock_rpc request = {} await client.disable_advanced_site_search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_advanced_site_search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4768,8 +4789,9 @@ def test_recrawl_uris_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.recrawl_uris(request) @@ -4823,26 +4845,28 @@ async def test_recrawl_uris_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recrawl_uris - ] = mock_object + ] = mock_rpc request = {} await client.recrawl_uris(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.recrawl_uris(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5062,8 +5086,9 @@ def test_batch_verify_target_sites_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_verify_target_sites(request) @@ -5119,26 +5144,28 @@ async def test_batch_verify_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_verify_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.batch_verify_target_sites(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_verify_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5435,22 +5462,23 @@ async def test_fetch_domain_verification_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_domain_verification_status - ] = mock_object + ] = mock_rpc request = {} await client.fetch_domain_verification_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_domain_verification_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5887,8 +5915,9 @@ def test_set_uri_pattern_document_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_uri_pattern_document_data(request) @@ -5944,26 +5973,28 @@ async def test_set_uri_pattern_document_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_uri_pattern_document_data - ] = mock_object + ] = mock_rpc request = {} await client.set_uri_pattern_document_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_uri_pattern_document_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6246,22 +6277,23 @@ async def test_get_uri_pattern_document_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_uri_pattern_document_data - ] = mock_object + ] = mock_rpc request = {} await client.get_uri_pattern_document_data(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_uri_pattern_document_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py index fbb47a948bae..b23c90ee838d 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py @@ -1361,22 +1361,23 @@ async def test_write_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_user_event - ] = mock_object + ] = mock_rpc request = {} await client.write_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1681,22 +1682,23 @@ async def test_collect_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.collect_user_event - ] = mock_object + ] = mock_rpc request = {} await client.collect_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.collect_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1926,8 +1928,9 @@ def test_purge_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_user_events(request) @@ -1983,26 +1986,28 @@ async def test_purge_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_user_events - ] = mock_object + ] = mock_rpc request = {} await client.purge_user_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2226,8 +2231,9 @@ def test_import_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_user_events(request) @@ -2283,26 +2289,28 @@ async def test_import_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_user_events - ] = mock_object + ] = mock_rpc request = {} await client.import_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py index 64447f603259..d3924c4cbf4c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py @@ -1341,22 +1341,23 @@ async def test_complete_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_query - ] = mock_object + ] = mock_rpc request = {} await client.complete_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1579,8 +1580,9 @@ def test_import_suggestion_deny_list_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_suggestion_deny_list_entries(request) @@ -1636,26 +1638,28 @@ async def test_import_suggestion_deny_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_suggestion_deny_list_entries - ] = mock_object + ] = mock_rpc request = {} await client.import_suggestion_deny_list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_suggestion_deny_list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1881,8 +1885,9 @@ def test_purge_suggestion_deny_list_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_suggestion_deny_list_entries(request) @@ -1938,26 +1943,28 @@ async def test_purge_suggestion_deny_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_suggestion_deny_list_entries - ] = mock_object + ] = mock_rpc request = {} await client.purge_suggestion_deny_list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_suggestion_deny_list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2183,8 +2190,9 @@ def test_import_completion_suggestions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_completion_suggestions(request) @@ -2240,26 +2248,28 @@ async def test_import_completion_suggestions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_completion_suggestions - ] = mock_object + ] = mock_rpc request = {} await client.import_completion_suggestions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_completion_suggestions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2485,8 +2495,9 @@ def test_purge_completion_suggestions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_completion_suggestions(request) @@ -2542,26 +2553,28 @@ async def test_purge_completion_suggestions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_completion_suggestions - ] = mock_object + ] = mock_rpc request = {} await client.purge_completion_suggestions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_completion_suggestions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py index 90d678b8ba12..402ee2c3c6ea 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py @@ -1304,22 +1304,23 @@ async def test_create_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_control - ] = mock_object + ] = mock_rpc request = {} await client.create_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1699,22 +1700,23 @@ async def test_delete_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_control - ] = mock_object + ] = mock_rpc request = {} await client.delete_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2064,22 +2066,23 @@ async def test_update_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_control - ] = mock_object + ] = mock_rpc request = {} await client.update_control(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2470,22 +2473,23 @@ async def test_get_control_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_control - ] = mock_object + ] = mock_rpc request = {} await client.get_control(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_control(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2844,22 +2848,23 @@ async def test_list_controls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_controls - ] = mock_object + ] = mock_rpc request = {} await client.list_controls(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_controls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py index 769bab77b330..5ce407a2cbaf 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py @@ -1400,22 +1400,23 @@ async def test_converse_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.converse_conversation - ] = mock_object + ] = mock_rpc request = {} await client.converse_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.converse_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1805,22 +1806,23 @@ async def test_create_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2199,22 +2201,23 @@ async def test_delete_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2579,22 +2582,23 @@ async def test_update_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversation - ] = mock_object + ] = mock_rpc request = {} await client.update_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2976,22 +2980,23 @@ async def test_get_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3363,22 +3368,23 @@ async def test_list_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3944,22 +3950,23 @@ async def test_answer_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.answer_query - ] = mock_object + ] = mock_rpc request = {} await client.answer_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.answer_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4243,22 +4250,23 @@ async def test_get_answer_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_answer - ] = mock_object + ] = mock_rpc request = {} await client.get_answer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_answer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4622,22 +4630,23 @@ async def test_create_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session - ] = mock_object + ] = mock_rpc request = {} await client.create_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4990,22 +4999,23 @@ async def test_delete_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session - ] = mock_object + ] = mock_rpc request = {} await client.delete_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5348,22 +5358,23 @@ async def test_update_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session - ] = mock_object + ] = mock_rpc request = {} await client.update_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5729,22 +5740,23 @@ async def test_get_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session - ] = mock_object + ] = mock_rpc request = {} await client.get_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6100,22 +6112,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py index e44cda825f20..14174b3855eb 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py @@ -1287,8 +1287,9 @@ def test_create_data_store_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_store(request) @@ -1344,26 +1345,28 @@ async def test_create_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_store - ] = mock_object + ] = mock_rpc request = {} await client.create_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1755,22 +1758,23 @@ async def test_get_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_store - ] = mock_object + ] = mock_rpc request = {} await client.get_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2135,22 +2139,23 @@ async def test_list_data_stores_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_stores - ] = mock_object + ] = mock_rpc request = {} await client.list_data_stores(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_stores(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2646,8 +2651,9 @@ def test_delete_data_store_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_store(request) @@ -2703,26 +2709,28 @@ async def test_delete_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_store - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3100,22 +3108,23 @@ async def test_update_data_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_store - ] = mock_object + ] = mock_rpc request = {} await client.update_data_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index 99c854274145..6ffa744aae25 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -1336,22 +1336,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1704,22 +1705,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2274,22 +2276,23 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2718,22 +2721,23 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3139,22 +3143,23 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3442,8 +3447,9 @@ def test_import_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_documents(request) @@ -3497,26 +3503,28 @@ async def test_import_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_documents - ] = mock_object + ] = mock_rpc request = {} await client.import_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3726,8 +3734,9 @@ def test_purge_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_documents(request) @@ -3781,26 +3790,28 @@ async def test_purge_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_documents - ] = mock_object + ] = mock_rpc request = {} await client.purge_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py index ccc6b3dfc889..0352527c056c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py @@ -1240,8 +1240,9 @@ def test_create_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_engine(request) @@ -1295,26 +1296,28 @@ async def test_create_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_engine - ] = mock_object + ] = mock_rpc request = {} await client.create_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1660,8 +1663,9 @@ def test_delete_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_engine(request) @@ -1715,26 +1719,28 @@ async def test_delete_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_engine - ] = mock_object + ] = mock_rpc request = {} await client.delete_engine(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2088,22 +2094,23 @@ async def test_update_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_engine - ] = mock_object + ] = mock_rpc request = {} await client.update_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2512,22 +2519,23 @@ async def test_get_engine_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_engine - ] = mock_object + ] = mock_rpc request = {} await client.get_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2884,22 +2892,23 @@ async def test_list_engines_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_engines - ] = mock_object + ] = mock_rpc request = {} await client.list_engines(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_engines(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3454,22 +3463,23 @@ async def test_pause_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_engine - ] = mock_object + ] = mock_rpc request = {} await client.pause_engine(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3834,22 +3844,23 @@ async def test_resume_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_engine - ] = mock_object + ] = mock_rpc request = {} await client.resume_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4146,8 +4157,9 @@ def test_tune_engine_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.tune_engine(request) @@ -4201,26 +4213,28 @@ async def test_tune_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tune_engine - ] = mock_object + ] = mock_rpc request = {} await client.tune_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.tune_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py index bffd072b943a..0fd27c82c7e8 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py @@ -1340,22 +1340,23 @@ async def test_get_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1711,22 +1712,23 @@ async def test_list_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2222,8 +2224,9 @@ def test_create_evaluation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_evaluation(request) @@ -2279,26 +2282,28 @@ async def test_create_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.create_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2680,22 +2685,23 @@ async def test_list_evaluation_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluation_results - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluation_results(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluation_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py index d890366e9041..c8308f1f9aa7 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py @@ -1371,22 +1371,23 @@ async def test_check_grounding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_grounding - ] = mock_object + ] = mock_rpc request = {} await client.check_grounding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_grounding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py index 30d3cbcf70ac..986937dbc84e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py @@ -1245,8 +1245,9 @@ def test_provision_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.provision_project(request) @@ -1302,26 +1303,28 @@ async def test_provision_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_project - ] = mock_object + ] = mock_rpc request = {} await client.provision_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.provision_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py index 2ec9e8e45d82..b223a7375323 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py @@ -1241,22 +1241,23 @@ async def test_rank_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio" ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rank - ] = mock_object + ] = mock_rpc request = {} await client.rank(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rank(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py index 2b47ccc3d7c4..8a372e4599ca 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py @@ -1365,22 +1365,23 @@ async def test_recommend_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recommend - ] = mock_object + ] = mock_rpc request = {} await client.recommend(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.recommend(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py index 4a02d8c4fcaa..4f1a77a32282 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py @@ -1342,22 +1342,23 @@ async def test_get_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.get_sample_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1721,22 +1722,23 @@ async def test_list_sample_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sample_queries - ] = mock_object + ] = mock_rpc request = {} await client.list_sample_queries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sample_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2312,22 +2314,23 @@ async def test_create_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.create_sample_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2727,22 +2730,23 @@ async def test_update_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.update_sample_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3129,22 +3133,23 @@ async def test_delete_sample_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_sample_query - ] = mock_object + ] = mock_rpc request = {} await client.delete_sample_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sample_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3450,8 +3455,9 @@ def test_import_sample_queries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_sample_queries(request) @@ -3507,26 +3513,28 @@ async def test_import_sample_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_sample_queries - ] = mock_object + ] = mock_rpc request = {} await client.import_sample_queries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_sample_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py index e430bb6b75ed..688420f2e9ce 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py @@ -1379,22 +1379,23 @@ async def test_get_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.get_sample_query_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1773,22 +1774,23 @@ async def test_list_sample_query_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sample_query_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_sample_query_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sample_query_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2371,22 +2373,23 @@ async def test_create_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.create_sample_query_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2785,22 +2788,23 @@ async def test_update_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.update_sample_query_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3180,22 +3184,23 @@ async def test_delete_sample_query_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_sample_query_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_sample_query_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sample_query_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py index 1189aebf7c19..960df572518d 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py @@ -1291,22 +1291,23 @@ async def test_get_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1653,22 +1654,23 @@ async def test_list_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_schemas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2157,8 +2159,9 @@ def test_create_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_schema(request) @@ -2212,26 +2215,28 @@ async def test_create_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2585,8 +2590,9 @@ def test_update_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_schema(request) @@ -2640,26 +2646,28 @@ async def test_update_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2867,8 +2875,9 @@ def test_delete_schema_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_schema(request) @@ -2922,26 +2931,28 @@ async def test_delete_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py index 5b21972bd36e..3a96c98b8ab2 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py @@ -1315,22 +1315,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py index d7a503a38085..f2bbae9087c9 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py @@ -1321,8 +1321,9 @@ def test_train_custom_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_custom_model(request) @@ -1378,26 +1379,28 @@ async def test_train_custom_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_custom_model - ] = mock_object + ] = mock_rpc request = {} await client.train_custom_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_custom_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1675,22 +1678,23 @@ async def test_list_custom_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_models - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py index 4b3609fbd907..33f10ee85214 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py @@ -1406,22 +1406,23 @@ async def test_update_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.update_serving_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1892,22 +1893,23 @@ async def test_get_serving_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_serving_config - ] = mock_object + ] = mock_rpc request = {} await client.get_serving_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_serving_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2307,22 +2309,23 @@ async def test_list_serving_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_serving_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_serving_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_serving_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py index ed6e85184071..1dbadf278dfb 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py @@ -1382,22 +1382,23 @@ async def test_get_site_search_engine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_site_search_engine - ] = mock_object + ] = mock_rpc request = {} await client.get_site_search_engine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_site_search_engine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1711,8 +1712,9 @@ def test_create_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_target_site(request) @@ -1768,26 +1770,28 @@ async def test_create_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_target_site - ] = mock_object + ] = mock_rpc request = {} await client.create_target_site(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2109,8 +2113,9 @@ def test_batch_create_target_sites_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_target_sites(request) @@ -2166,26 +2171,28 @@ async def test_batch_create_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_target_sites(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2476,22 +2483,23 @@ async def test_get_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_target_site - ] = mock_object + ] = mock_rpc request = {} await client.get_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2805,8 +2813,9 @@ def test_update_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_target_site(request) @@ -2862,26 +2871,28 @@ async def test_update_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_target_site - ] = mock_object + ] = mock_rpc request = {} await client.update_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3192,8 +3203,9 @@ def test_delete_target_site_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_target_site(request) @@ -3249,26 +3261,28 @@ async def test_delete_target_site_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_target_site - ] = mock_object + ] = mock_rpc request = {} await client.delete_target_site(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_target_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3640,22 +3654,23 @@ async def test_list_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.list_target_sites(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4174,8 +4189,9 @@ def test_enable_advanced_site_search_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_advanced_site_search(request) @@ -4231,26 +4247,28 @@ async def test_enable_advanced_site_search_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_advanced_site_search - ] = mock_object + ] = mock_rpc request = {} await client.enable_advanced_site_search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_advanced_site_search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4476,8 +4494,9 @@ def test_disable_advanced_site_search_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_advanced_site_search(request) @@ -4533,26 +4552,28 @@ async def test_disable_advanced_site_search_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_advanced_site_search - ] = mock_object + ] = mock_rpc request = {} await client.disable_advanced_site_search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_advanced_site_search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4767,8 +4788,9 @@ def test_recrawl_uris_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.recrawl_uris(request) @@ -4822,26 +4844,28 @@ async def test_recrawl_uris_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.recrawl_uris - ] = mock_object + ] = mock_rpc request = {} await client.recrawl_uris(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.recrawl_uris(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5061,8 +5085,9 @@ def test_batch_verify_target_sites_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_verify_target_sites(request) @@ -5118,26 +5143,28 @@ async def test_batch_verify_target_sites_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_verify_target_sites - ] = mock_object + ] = mock_rpc request = {} await client.batch_verify_target_sites(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_verify_target_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5434,22 +5461,23 @@ async def test_fetch_domain_verification_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_domain_verification_status - ] = mock_object + ] = mock_rpc request = {} await client.fetch_domain_verification_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_domain_verification_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index f36b51bcb329..fe2c1010670e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -1361,22 +1361,23 @@ async def test_write_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_user_event - ] = mock_object + ] = mock_rpc request = {} await client.write_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1681,22 +1682,23 @@ async def test_collect_user_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.collect_user_event - ] = mock_object + ] = mock_rpc request = {} await client.collect_user_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.collect_user_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1926,8 +1928,9 @@ def test_purge_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_user_events(request) @@ -1983,26 +1986,28 @@ async def test_purge_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_user_events - ] = mock_object + ] = mock_rpc request = {} await client.purge_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2226,8 +2231,9 @@ def test_import_user_events_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_user_events(request) @@ -2283,26 +2289,28 @@ async def test_import_user_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_user_events - ] = mock_object + ] = mock_rpc request = {} await client.import_user_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_user_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py b/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py +++ b/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py b/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py +++ b/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/async_client.py b/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/async_client.py index d99308c838b4..165b0e06ae24 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/async_client.py +++ b/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -229,10 +228,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataMigrationServiceClient).get_transport_class, - type(DataMigrationServiceClient), - ) + get_transport_class = DataMigrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py b/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py index 2bbf431b9a79..209427d94471 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py +++ b/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py @@ -788,7 +788,7 @@ def __init__( Type[DataMigrationServiceTransport], Callable[..., DataMigrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataMigrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataMigrationServiceTransport], transport) ) diff --git a/packages/google-cloud-dms/google/cloud/clouddms_v1/types/conversionworkspace_resources.py b/packages/google-cloud-dms/google/cloud/clouddms_v1/types/conversionworkspace_resources.py index 4b1c9a3ce94d..8621f22c687e 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms_v1/types/conversionworkspace_resources.py +++ b/packages/google-cloud-dms/google/cloud/clouddms_v1/types/conversionworkspace_resources.py @@ -1102,10 +1102,9 @@ class MultiColumnDatatypeChange(proto.Message): if not specified and relevant uses the source column precision. override_fractional_seconds_precision (int): - Optional. Column fractional seconds precision: - - - used only for timestamp based datatypes - - if not specified and relevant uses the source + Optional. Column fractional seconds precision + - used only for timestamp based datatypes - if + not specified and relevant uses the source column fractional seconds precision. custom_features (google.protobuf.struct_pb2.Struct): Optional. Custom engine specific features. 
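Aside (illustrative only, not part of the upstream patch): a minimal, self-contained Python sketch of the mock.AsyncMock pattern these generated async tests converge on. The names below (wrapped_methods, call_rpc, "some_method") are placeholders standing in for the client transport internals, not identifiers taken from the diff. The added mock_rpc.return_value = mock.Mock() line presumably ensures that awaiting the stub yields a plain synchronous Mock rather than a nested AsyncMock, so response handling in the code under test does not produce unawaited coroutines.

import asyncio
from unittest import mock


async def call_rpc(wrapped_methods, request):
    # Stand-in for an async client method: look up the cached wrapped RPC
    # and await it, roughly the way the generated async clients dispatch calls.
    return await wrapped_methods["some_method"](request)


async def main():
    # Replace the "cached wrapped function" with a mock, mirroring the tests:
    # awaiting an AsyncMock call returns its return_value, so making that a
    # plain Mock keeps the awaited response synchronous.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    wrapped_methods = {"some_method": mock_rpc}

    request = {}
    await call_rpc(wrapped_methods, request)
    assert mock_rpc.call_count == 1

    # A second call goes through the same cached wrapper; no new wrapper is built.
    await call_rpc(wrapped_methods, request)
    assert mock_rpc.call_count == 2


asyncio.run(main())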
diff --git a/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json index 3ea1db6a8d13..92f5517525f3 100644 --- a/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json +++ b/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dms", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py index c9150b949458..e3f942f6b5b7 100644 --- a/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py +++ b/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py @@ -1346,22 +1346,23 @@ async def test_list_migration_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1961,22 +1962,23 @@ async def test_get_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2311,8 +2313,9 @@ def test_create_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_migration_job(request) @@ -2368,26 +2371,28 @@ async def test_create_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.create_migration_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2717,8 +2722,9 @@ def test_update_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_migration_job(request) @@ -2774,26 +2780,28 @@ async def test_update_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.update_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3115,8 +3123,9 @@ def test_delete_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_migration_job(request) @@ -3172,26 +3181,28 @@ async def test_delete_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3501,8 +3512,9 @@ def test_start_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_migration_job(request) @@ -3558,26 +3570,28 @@ async def test_start_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.start_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3801,8 +3815,9 @@ def test_stop_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_migration_job(request) @@ -3858,26 +3873,28 @@ async def test_stop_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.stop_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4101,8 +4118,9 @@ def test_resume_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resume_migration_job(request) @@ -4158,26 +4176,28 @@ async def test_resume_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.resume_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resume_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4402,8 +4422,9 @@ def test_promote_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.promote_migration_job(request) @@ -4459,26 +4480,28 @@ async def test_promote_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.promote_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.promote_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.promote_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4702,8 +4725,9 @@ def test_verify_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.verify_migration_job(request) @@ -4759,26 +4783,28 @@ async def test_verify_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.verify_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.verify_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.verify_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5003,8 +5029,9 @@ def test_restart_migration_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_migration_job(request) @@ -5060,26 +5087,28 @@ async def test_restart_migration_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_migration_job - ] = mock_object + ] = mock_rpc request = {} await client.restart_migration_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_migration_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5363,22 +5392,23 @@ async def test_generate_ssh_script_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_ssh_script - ] = mock_object + ] = mock_rpc request = {} await client.generate_ssh_script(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_ssh_script(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5670,22 +5700,23 @@ async def test_generate_tcp_proxy_script_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_tcp_proxy_script - ] = mock_object + ] = mock_rpc request = {} await client.generate_tcp_proxy_script(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_tcp_proxy_script(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5980,22 +6011,23 @@ async def test_list_connection_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connection_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_connection_profiles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connection_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6584,22 +6616,23 @@ async def test_get_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6923,8 +6956,9 @@ def test_create_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection_profile(request) @@ -6980,26 +7014,28 @@ async def test_create_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.create_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7331,8 +7367,9 @@ def test_update_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection_profile(request) @@ -7388,26 +7425,28 @@ async def test_update_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.update_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7731,8 +7770,9 @@ def test_delete_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection_profile(request) @@ -7788,26 +7828,28 @@ async def test_delete_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8123,8 +8165,9 @@ def test_create_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_private_connection(request) @@ -8180,26 +8223,28 @@ async def test_create_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8595,22 +8640,23 @@ async def test_get_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8995,22 +9041,23 @@ async def test_list_private_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_private_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9534,8 +9581,9 @@ def test_delete_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_private_connection(request) @@ -9591,26 +9639,28 @@ async def test_delete_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9989,22 +10039,23 @@ async def test_get_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.get_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10389,22 +10440,23 @@ async def test_list_conversion_workspaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversion_workspaces - ] = mock_object + ] = mock_rpc request = {} await client.list_conversion_workspaces(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversion_workspaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10935,8 +10987,9 @@ def test_create_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_conversion_workspace(request) @@ -10992,26 +11045,28 @@ async def test_create_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.create_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11351,8 +11406,9 @@ def test_update_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_conversion_workspace(request) @@ -11408,26 +11464,28 @@ async def test_update_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.update_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11759,8 +11817,9 @@ def test_delete_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_conversion_workspace(request) @@ -11816,26 +11875,28 @@ async def test_delete_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12226,22 +12287,23 @@ async def test_create_mapping_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_mapping_rule - ] = mock_object + ] = mock_rpc request = {} await client.create_mapping_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_mapping_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12640,22 +12702,23 @@ async def test_delete_mapping_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_mapping_rule - ] = mock_object + ] = mock_rpc request = {} await client.delete_mapping_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_mapping_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13019,22 +13082,23 @@ async def test_list_mapping_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_mapping_rules - ] = mock_object + ] = mock_rpc request = {} await client.list_mapping_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_mapping_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13619,22 +13683,23 @@ async def test_get_mapping_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_mapping_rule - ] = mock_object + ] = mock_rpc request = {} await client.get_mapping_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_mapping_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13955,8 +14020,9 @@ def test_seed_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
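The reworded comment in these hunks says that operation (LRO) methods call wrapper_fn to build a cached client._transport.operations_client instance on the first RPC call, and that later calls reuse it, which is why the tests reset wrapper_fn and then assert wrapper_fn.call_count == 0. A small sketch of that caching behaviour, using a made-up DummyLroTransport rather than the real transport class:

from unittest import mock


class DummyLroTransport:
    """Illustrative stand-in for a transport backing long-running-operation methods."""

    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        # Built via wrapper_fn on first use, then cached on the transport, so
        # later RPCs reuse the same instance instead of rebuilding it.
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(return_value=object())
transport = DummyLroTransport(wrapper_fn)

first = transport.operations_client   # first LRO call: wrapper_fn builds the client
wrapper_fn.reset_mock()               # mirrors the tests' wrapper_fn.reset_mock()
second = transport.operations_client  # later call: cached instance, no new wrapper

assert wrapper_fn.call_count == 0
assert first is second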
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.seed_conversion_workspace(request) @@ -14012,26 +14078,28 @@ async def test_seed_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.seed_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.seed_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.seed_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14256,8 +14324,9 @@ def test_import_mapping_rules_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_mapping_rules(request) @@ -14313,26 +14382,28 @@ async def test_import_mapping_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_mapping_rules - ] = mock_object + ] = mock_rpc request = {} await client.import_mapping_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_mapping_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14559,8 +14630,9 @@ def test_convert_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.convert_conversion_workspace(request) @@ -14616,26 +14688,28 @@ async def test_convert_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.convert_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.convert_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.convert_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14863,8 +14937,9 @@ def test_commit_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.commit_conversion_workspace(request) @@ -14920,26 +14995,28 @@ async def test_commit_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.commit_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.commit_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15165,8 +15242,9 @@ def test_rollback_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.rollback_conversion_workspace(request) @@ -15222,26 +15300,28 @@ async def test_rollback_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.rollback_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.rollback_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15471,8 +15551,9 @@ def test_apply_conversion_workspace_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.apply_conversion_workspace(request) @@ -15528,26 +15609,28 @@ async def test_apply_conversion_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.apply_conversion_workspace - ] = mock_object + ] = mock_rpc request = {} await client.apply_conversion_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.apply_conversion_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15837,22 +15920,23 @@ async def test_describe_database_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.describe_database_entities - ] = mock_object + ] = mock_rpc request = {} await client.describe_database_entities(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.describe_database_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16343,22 +16427,23 @@ async def test_search_background_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_background_jobs - ] = mock_object + ] = mock_rpc request = {} await client.search_background_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_background_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16638,22 +16723,23 @@ async def test_describe_conversion_workspace_revisions_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.describe_conversion_workspace_revisions - ] = mock_object + ] = mock_rpc request = {} await client.describe_conversion_workspace_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.describe_conversion_workspace_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16931,22 +17017,23 @@ async def test_fetch_static_ips_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_static_ips - ] = mock_object + ] = mock_rpc request = {} await client.fetch_static_ips(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_static_ips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index 381e786a17cc..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index 381e786a17cc..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py index 03276efbb9de..b9036902648c 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -238,10 +237,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentProcessorServiceClient).get_transport_class, - type(DocumentProcessorServiceClient), - ) + get_transport_class = DocumentProcessorServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py index f74fd09305dc..0c1aefc90440 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py @@ -784,7 +784,7 @@ def __init__( Type[DocumentProcessorServiceTransport], Callable[..., DocumentProcessorServiceTransport], ] = ( - type(self).get_transport_class(transport) + DocumentProcessorServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentProcessorServiceTransport], transport) ) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py index 381e786a17cc..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/async_client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/async_client.py index d37a27997dc0..4edfe2c34aa4 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/async_client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentUnderstandingServiceClient).get_transport_class, - type(DocumentUnderstandingServiceClient), - ) + get_transport_class = DocumentUnderstandingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/client.py index 59d4f92e1120..3a4dac49cbca 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/services/document_understanding_service/client.py @@ -659,7 +659,7 @@ def __init__( Type[DocumentUnderstandingServiceTransport], Callable[..., DocumentUnderstandingServiceTransport], ] = ( - type(self).get_transport_class(transport) + DocumentUnderstandingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., DocumentUnderstandingServiceTransport], transport diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index 381e786a17cc..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py index 55817c6f8fc8..aad3feb63ed4 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -238,10 +237,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentProcessorServiceClient).get_transport_class, - type(DocumentProcessorServiceClient), - ) + get_transport_class = DocumentProcessorServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py index 372b0e860aa5..2e59402c56c4 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py @@ -784,7 +784,7 @@ def __init__( Type[DocumentProcessorServiceTransport], Callable[..., DocumentProcessorServiceTransport], ] = ( - type(self).get_transport_class(transport) + DocumentProcessorServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentProcessorServiceTransport], transport) ) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py index f38e07332b70..add6a26a3d66 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentServiceClient).get_transport_class, type(DocumentServiceClient) - ) + get_transport_class = DocumentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py index 45ed0f99af74..21eb9d26c809 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py @@ -720,7 +720,7 @@ def __init__( transport_init: Union[ Type[DocumentServiceTransport], Callable[..., DocumentServiceTransport] ] = ( - type(self).get_transport_class(transport) + DocumentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentServiceTransport], transport) ) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py index 86af095d60ab..5fd1139ddcfb 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py @@ -396,8 +396,10 @@ class ListDocumentsRequest(proto.Message): https://google.aip.dev/160. Currently support query strings are: + ------------------------------------ + + ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED`` - - ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED`` - ``LabelingState=DOCUMENT_LABELED|DOCUMENT_UNLABELED|DOCUMENT_AUTO_LABELED`` - ``DisplayName=\"file_name.pdf\"`` - ``EntityType=abc/def`` diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 8fbbe668524b..96d60af285a2 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.31.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json index 8fd0e7d04fb6..ef56bd7e3eb3 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.31.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json 
b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index 8d7dd7bb3e8e..f47545a8ed3e 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.31.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py index cd6ae8cb014c..b08c63f5226e 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py @@ -1391,22 +1391,23 @@ async def test_process_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.process_document - ] = mock_object + ] = mock_rpc request = {} await client.process_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.process_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1708,8 +1709,9 @@ def test_batch_process_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_process_documents(request) @@ -1765,26 +1767,28 @@ async def test_batch_process_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_process_documents - ] = mock_object + ] = mock_rpc request = {} await client.batch_process_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_process_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2149,22 +2153,23 @@ async def test_fetch_processor_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_processor_types - ] = mock_object + ] = mock_rpc request = {} await client.fetch_processor_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_processor_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2535,22 +2540,23 @@ async def test_list_processor_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processor_types - ] = mock_object + ] = mock_rpc request = {} await client.list_processor_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processor_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3139,22 +3145,23 @@ async def test_get_processor_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor_type - ] = mock_object + ] = mock_rpc request = {} await client.get_processor_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3526,22 +3533,23 @@ async def test_list_processors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processors - ] = mock_object + ] = mock_rpc request = {} await client.list_processors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4109,22 +4117,23 @@ async def test_get_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor - ] = mock_object + ] = mock_rpc request = {} await client.get_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4443,8 +4452,9 @@ def test_train_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_processor_version(request) @@ -4500,26 +4510,28 @@ async def test_train_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.train_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4926,22 +4938,23 @@ async def test_get_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.get_processor_version(request) # Establish that the underlying gRPC stub method was called. 
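These hunks also add mock_rpc.return_value = mock.Mock() alongside the rename. The patch itself does not say why; one plausible reading is that it makes the awaited result a plain synchronous Mock rather than the MagicMock an AsyncMock would otherwise hand back automatically. A small sketch of that difference (an interpretation of the hunks, not generator documentation):

import asyncio
from unittest import mock


async def main():
    bare = mock.AsyncMock()
    shaped = mock.AsyncMock()
    shaped.return_value = mock.Mock()

    bare_result = await bare({})
    shaped_result = await shaped({})

    # The default awaited result is an auto-created child mock...
    print(type(bare_result).__name__)   # typically "MagicMock" on current CPython
    # ...while the explicitly shaped one is exactly the plain Mock we assigned.
    assert shaped_result is shaped.return_value
    assert type(shaped_result) is mock.Mock


asyncio.run(main())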
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5335,22 +5348,23 @@ async def test_list_processor_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processor_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_processor_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processor_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5867,8 +5881,9 @@ def test_delete_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_processor_version(request) @@ -5924,26 +5939,28 @@ async def test_delete_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6255,8 +6272,9 @@ def test_deploy_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_processor_version(request) @@ -6312,26 +6330,28 @@ async def test_deploy_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.deploy_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6643,8 +6663,9 @@ def test_undeploy_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_processor_version(request) @@ -6700,26 +6721,28 @@ async def test_undeploy_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7102,22 +7125,23 @@ async def test_create_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_processor - ] = mock_object + ] = mock_rpc request = {} await client.create_processor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7439,8 +7463,9 @@ def test_delete_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_processor(request) @@ -7494,26 +7519,28 @@ async def test_delete_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_processor - ] = mock_object + ] = mock_rpc request = {} await client.delete_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7806,8 +7833,9 @@ def test_enable_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_processor(request) @@ -7861,26 +7889,28 @@ async def test_enable_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_processor - ] = mock_object + ] = mock_rpc request = {} await client.enable_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8097,8 +8127,9 @@ def test_disable_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_processor(request) @@ -8154,26 +8185,28 @@ async def test_disable_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_processor - ] = mock_object + ] = mock_rpc request = {} await client.disable_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8401,8 +8434,9 @@ def test_set_default_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_default_processor_version(request) @@ -8458,26 +8492,28 @@ async def test_set_default_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_default_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.set_default_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_default_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8692,8 +8728,9 @@ def test_review_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.review_document(request) @@ -8747,26 +8784,28 @@ async def test_review_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.review_document - ] = mock_object + ] = mock_rpc request = {} await client.review_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.review_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9068,8 +9107,9 @@ def test_evaluate_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.evaluate_processor_version(request) @@ -9125,26 +9165,28 @@ async def test_evaluate_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.evaluate_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.evaluate_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.evaluate_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9507,22 +9549,23 @@ async def test_get_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9880,22 +9923,23 @@ async def test_list_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta2/test_document_understanding_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta2/test_document_understanding_service.py index da98336db4e3..f7f64d32db7e 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta2/test_document_understanding_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta2/test_document_understanding_service.py @@ -1293,8 +1293,9 @@ def test_batch_process_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_process_documents(request) @@ -1350,26 +1351,28 @@ async def test_batch_process_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_process_documents - ] = mock_object + ] = mock_rpc request = {} await client.batch_process_documents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_process_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1746,22 +1749,23 @@ async def test_process_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.process_document - ] = mock_object + ] = mock_rpc request = {} await client.process_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.process_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py index 19e12fae57c4..cb8ad433058f 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py @@ -1396,22 +1396,23 @@ async def test_process_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.process_document - ] = mock_object + ] = mock_rpc request = {} await client.process_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.process_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1716,8 +1717,9 @@ def test_batch_process_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_process_documents(request) @@ -1773,26 +1775,28 @@ async def test_batch_process_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_process_documents - ] = mock_object + ] = mock_rpc request = {} await client.batch_process_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_process_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2157,22 +2161,23 @@ async def test_fetch_processor_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_processor_types - ] = mock_object + ] = mock_rpc request = {} await client.fetch_processor_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_processor_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2543,22 +2548,23 @@ async def test_list_processor_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processor_types - ] = mock_object + ] = mock_rpc request = {} await client.list_processor_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processor_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3147,22 +3153,23 @@ async def test_get_processor_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor_type - ] = mock_object + ] = mock_rpc request = {} await client.get_processor_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3534,22 +3541,23 @@ async def test_list_processors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processors - ] = mock_object + ] = mock_rpc request = {} await client.list_processors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4117,22 +4125,23 @@ async def test_get_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor - ] = mock_object + ] = mock_rpc request = {} await client.get_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4451,8 +4460,9 @@ def test_train_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_processor_version(request) @@ -4508,26 +4518,28 @@ async def test_train_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.train_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4934,22 +4946,23 @@ async def test_get_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.get_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5343,22 +5356,23 @@ async def test_list_processor_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processor_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_processor_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processor_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5875,8 +5889,9 @@ def test_delete_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_processor_version(request) @@ -5932,26 +5947,28 @@ async def test_delete_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6263,8 +6280,9 @@ def test_deploy_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_processor_version(request) @@ -6320,26 +6338,28 @@ async def test_deploy_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.deploy_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6651,8 +6671,9 @@ def test_undeploy_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_processor_version(request) @@ -6708,26 +6729,28 @@ async def test_undeploy_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7110,22 +7133,23 @@ async def test_create_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_processor - ] = mock_object + ] = mock_rpc request = {} await client.create_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7447,8 +7471,9 @@ def test_delete_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_processor(request) @@ -7502,26 +7527,28 @@ async def test_delete_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_processor - ] = mock_object + ] = mock_rpc request = {} await client.delete_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7814,8 +7841,9 @@ def test_enable_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_processor(request) @@ -7869,26 +7897,28 @@ async def test_enable_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_processor - ] = mock_object + ] = mock_rpc request = {} await client.enable_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8105,8 +8135,9 @@ def test_disable_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_processor(request) @@ -8162,26 +8193,28 @@ async def test_disable_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_processor - ] = mock_object + ] = mock_rpc request = {} await client.disable_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8409,8 +8442,9 @@ def test_set_default_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.set_default_processor_version(request) @@ -8466,26 +8500,28 @@ async def test_set_default_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_default_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.set_default_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.set_default_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8700,8 +8736,9 @@ def test_review_document_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.review_document(request) @@ -8755,26 +8792,28 @@ async def test_review_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.review_document - ] = mock_object + ] = mock_rpc request = {} await client.review_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.review_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9076,8 +9115,9 @@ def test_evaluate_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.evaluate_processor_version(request) @@ -9133,26 +9173,28 @@ async def test_evaluate_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.evaluate_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.evaluate_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.evaluate_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9515,22 +9557,23 @@ async def test_get_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9888,22 +9931,23 @@ async def test_list_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10404,8 +10448,9 @@ def test_import_processor_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_processor_version(request) @@ -10461,26 +10506,28 @@ async def test_import_processor_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_processor_version - ] = mock_object + ] = mock_rpc request = {} await client.import_processor_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_processor_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py index 69a71a4df7d9..7233ce2c91c1 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py @@ -1264,8 +1264,9 @@ def test_update_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_dataset(request) @@ -1319,26 +1320,28 @@ async def test_update_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dataset - ] = mock_object + ] = mock_rpc request = {} await client.update_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1672,8 +1675,9 @@ def test_import_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_documents(request) @@ -1727,26 +1731,28 @@ async def test_import_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_documents - ] = mock_object + ] = mock_rpc request = {} await client.import_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2088,22 +2094,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2456,22 +2463,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2971,8 +2979,9 @@ def test_batch_delete_documents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_documents(request) @@ -3028,26 +3037,28 @@ async def test_batch_delete_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_documents - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3416,22 +3427,23 @@ async def test_get_dataset_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3800,22 +3812,23 @@ async def test_update_dataset_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dataset_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_dataset_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dataset_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-domains/google/cloud/domains/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-domains/google/cloud/domains/gapic_version.py +++ b/packages/google-cloud-domains/google/cloud/domains/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/async_client.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/async_client.py index c1c6bfcdf277..ef00dae1a722 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/async_client.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DomainsClient).get_transport_class, type(DomainsClient) - ) + get_transport_class = DomainsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py index 72de14aff2f3..986e5c916756 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py @@ -664,7 +664,7 @@ def __init__( transport_init: Union[ Type[DomainsTransport], Callable[..., DomainsTransport] ] = ( - type(self).get_transport_class(transport) + DomainsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DomainsTransport], transport) ) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/async_client.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/async_client.py index cd7515cb85a4..3078cb5d9676 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/async_client.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DomainsClient).get_transport_class, type(DomainsClient) - ) + get_transport_class = DomainsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py index cbd27151ca1b..823c15ab18d3 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py @@ -664,7 +664,7 @@ def __init__( transport_init: Union[ Type[DomainsTransport], Callable[..., DomainsTransport] ] = ( - type(self).get_transport_class(transport) + DomainsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DomainsTransport], transport) ) diff --git a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json index bb17ae39b34e..5384077f78bd 100644 --- a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json +++ b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-domains", - "version": "1.7.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json index 925c5f3193de..7634af9e4008 100644 --- a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json +++ b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-domains", - "version": "1.7.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py index 75a2a7dca897..9deace39db86 100644 --- a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py @@ -1226,22 +1226,23 @@ async def test_search_domains_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_domains - ] = mock_object + ] = mock_rpc request = {} await client.search_domains(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_domains(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1607,22 +1608,23 @@ async def test_retrieve_register_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_register_parameters - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_register_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_register_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1933,8 +1935,9 @@ def test_register_domain_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.register_domain(request) @@ -1988,26 +1991,28 @@ async def test_register_domain_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.register_domain - ] = mock_object + ] = mock_rpc request = {} await client.register_domain(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.register_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2383,22 +2388,23 @@ async def test_retrieve_transfer_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_transfer_parameters - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_transfer_parameters(request) # Establish that the underlying gRPC stub method was called. 
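The recurring test hunks rename mock_object to mock_rpc and pin an explicit return_value on the AsyncMock. A standalone sketch of the cached-wrapped-RPC counting idea, using invented wrapped_methods/call_rpc names rather than the real client surface:

import asyncio
from unittest import mock


async def main():
    # Pinning return_value to a plain Mock makes the awaited result an ordinary
    # synchronous object, mirroring a unary RPC response.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()

    # Hypothetical stand-in for the transport's cache of wrapped methods.
    wrapped_methods = {"search_domains": mock_rpc}

    async def call_rpc(name, request):
        # The client looks up the cached wrapped method and awaits it.
        return await wrapped_methods[name](request)

    response = await call_rpc("search_domains", {})
    assert mock_rpc.call_count == 1
    assert not asyncio.iscoroutine(response)  # plain Mock, nothing left to await

    await call_rpc("search_domains", {})
    assert mock_rpc.call_count == 2  # same cached wrapper, called twice


asyncio.run(main())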
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_transfer_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2715,9 @@ def test_transfer_domain_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.transfer_domain(request) @@ -2764,26 +2771,28 @@ async def test_transfer_domain_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_domain - ] = mock_object + ] = mock_rpc request = {} await client.transfer_domain(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.transfer_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3175,22 +3184,23 @@ async def test_list_registrations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_registrations - ] = mock_object + ] = mock_rpc request = {} await client.list_registrations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_registrations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3765,22 +3775,23 @@ async def test_get_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_registration - ] = mock_object + ] = mock_rpc request = {} await client.get_registration(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4087,8 +4098,9 @@ def test_update_registration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_registration(request) @@ -4144,26 +4156,28 @@ async def test_update_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_registration - ] = mock_object + ] = mock_rpc request = {} await client.update_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4484,8 +4498,9 @@ def test_configure_management_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.configure_management_settings(request) @@ -4541,26 +4556,28 @@ async def test_configure_management_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.configure_management_settings - ] = mock_object + ] = mock_rpc request = {} await client.configure_management_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.configure_management_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4904,8 +4921,9 @@ def test_configure_dns_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.configure_dns_settings(request) @@ -4961,26 +4979,28 @@ async def test_configure_dns_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.configure_dns_settings - ] = mock_object + ] = mock_rpc request = {} await client.configure_dns_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.configure_dns_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5335,8 +5355,9 @@ def test_configure_contact_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.configure_contact_settings(request) @@ -5392,26 +5413,28 @@ async def test_configure_contact_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.configure_contact_settings - ] = mock_object + ] = mock_rpc request = {} await client.configure_contact_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
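The reworded comments above describe a cached operations client that long-running operation methods build on their first call. A tiny illustration of that caching shape (FakeTransport is hypothetical, not the generated transport):

class FakeTransport:
    """Hypothetical transport with a lazily built operations client."""

    def __init__(self):
        self._operations_client = None
        self.builds = 0

    @property
    def operations_client(self):
        # Constructed on the first long-running call, cached afterwards.
        if self._operations_client is None:
            self.builds += 1
            self._operations_client = object()  # stand-in for a real OperationsClient
        return self._operations_client


transport = FakeTransport()
assert transport.operations_client is transport.operations_client
assert transport.builds == 1  # every later LRO call reuses the same instance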
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.configure_contact_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5754,8 +5777,9 @@ def test_export_registration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_registration(request) @@ -5811,26 +5835,28 @@ async def test_export_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_registration - ] = mock_object + ] = mock_rpc request = {} await client.export_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6140,8 +6166,9 @@ def test_delete_registration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_registration(request) @@ -6197,26 +6224,28 @@ async def test_delete_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_registration - ] = mock_object + ] = mock_rpc request = {} await client.delete_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6585,22 +6614,23 @@ async def test_retrieve_authorization_code_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_authorization_code - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_authorization_code(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_authorization_code(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6973,22 +7003,23 @@ async def test_reset_authorization_code_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_authorization_code - ] = mock_object + ] = mock_rpc request = {} await client.reset_authorization_code(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reset_authorization_code(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py index 10e5faaae120..b62822eef94e 100644 --- a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py @@ -1226,22 +1226,23 @@ async def test_search_domains_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_domains - ] = mock_object + ] = mock_rpc request = {} await client.search_domains(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_domains(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1607,22 +1608,23 @@ async def test_retrieve_register_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_register_parameters - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_register_parameters(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_register_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1933,8 +1935,9 @@ def test_register_domain_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.register_domain(request) @@ -1988,26 +1991,28 @@ async def test_register_domain_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.register_domain - ] = mock_object + ] = mock_rpc request = {} await client.register_domain(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.register_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2383,22 +2388,23 @@ async def test_retrieve_transfer_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_transfer_parameters - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_transfer_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_transfer_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2715,9 @@ def test_transfer_domain_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.transfer_domain(request) @@ -2764,26 +2771,28 @@ async def test_transfer_domain_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_domain - ] = mock_object + ] = mock_rpc request = {} await client.transfer_domain(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.transfer_domain(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3175,22 +3184,23 @@ async def test_list_registrations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_registrations - ] = mock_object + ] = mock_rpc request = {} await client.list_registrations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_registrations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3765,22 +3775,23 @@ async def test_get_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_registration - ] = mock_object + ] = mock_rpc request = {} await client.get_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4087,8 +4098,9 @@ def test_update_registration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_registration(request) @@ -4144,26 +4156,28 @@ async def test_update_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_registration - ] = mock_object + ] = mock_rpc request = {} await client.update_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4484,8 +4498,9 @@ def test_configure_management_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.configure_management_settings(request) @@ -4541,26 +4556,28 @@ async def test_configure_management_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.configure_management_settings - ] = mock_object + ] = mock_rpc request = {} await client.configure_management_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.configure_management_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4904,8 +4921,9 @@ def test_configure_dns_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.configure_dns_settings(request) @@ -4961,26 +4979,28 @@ async def test_configure_dns_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.configure_dns_settings - ] = mock_object + ] = mock_rpc request = {} await client.configure_dns_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.configure_dns_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5335,8 +5355,9 @@ def test_configure_contact_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.configure_contact_settings(request) @@ -5392,26 +5413,28 @@ async def test_configure_contact_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.configure_contact_settings - ] = mock_object + ] = mock_rpc request = {} await client.configure_contact_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.configure_contact_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5754,8 +5777,9 @@ def test_export_registration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_registration(request) @@ -5811,26 +5835,28 @@ async def test_export_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_registration - ] = mock_object + ] = mock_rpc request = {} await client.export_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6140,8 +6166,9 @@ def test_delete_registration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_registration(request) @@ -6197,26 +6224,28 @@ async def test_delete_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_registration - ] = mock_object + ] = mock_rpc request = {} await client.delete_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6585,22 +6614,23 @@ async def test_retrieve_authorization_code_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_authorization_code - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_authorization_code(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_authorization_code(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6973,22 +7003,23 @@ async def test_reset_authorization_code_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_authorization_code - ] = mock_object + ] = mock_rpc request = {} await client.reset_authorization_code(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reset_authorization_code(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/async_client.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/async_client.py index c86933f94c7a..775108bfca19 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/async_client.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
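The gapic_version.py hunks here reset 0.5.11 to 0.0.0 next to an {x-release-please-version} marker. A small check of that placeholder line, assuming (per the inline marker) that release tooling later rewrites the tagged assignment to the published version:

import re

# Hypothetical copy of the placeholder line the generator emits.
line = '__version__ = "0.0.0"  # {x-release-please-version}'
match = re.search(r'"(?P<version>\d+\.\d+\.\d+)"', line)
assert match is not None and match.group("version") == "0.0.0"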
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,9 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EdgeContainerClient).get_transport_class, type(EdgeContainerClient) - ) + get_transport_class = EdgeContainerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py index b306a8580fa8..f440f378a372 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py @@ -786,7 +786,7 @@ def __init__( transport_init: Union[ Type[EdgeContainerTransport], Callable[..., EdgeContainerTransport] ] = ( - type(self).get_transport_class(transport) + EdgeContainerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EdgeContainerTransport], transport) ) diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/types/resources.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/types/resources.py index dc8141e90e9d..ebf7bc94a8cc 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/types/resources.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/types/resources.py @@ -410,7 +410,7 @@ class MaintenanceEvent(proto.Message): operation (str): Output only. The operation for running the maintenance event. Specified in the format - `projects/*/locations/*/operations/*`. If the maintenance + projects/\ */locations/*/operations/*. If the maintenance event is split into multiple operations (e.g. due to maintenance windows), the latest one is recorded. 
type_ (google.cloud.edgecontainer_v1.types.Cluster.MaintenanceEvent.Type): diff --git a/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json b/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json index e2a2095dcaff..3cd9098ed4e2 100644 --- a/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json +++ b/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgecontainer", - "version": "0.5.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py b/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py index b20546f1dfd7..652510d6e0ca 100644 --- a/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py +++ b/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py @@ -1303,22 +1303,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1908,22 +1909,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2246,8 +2248,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2301,26 +2304,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2630,8 +2635,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2685,26 +2691,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3008,8 +3016,9 @@ def test_upgrade_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
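The synchronous variants of these tests assert that wrapper_fn is not called again once an RPC has been wrapped. A hedged sketch of that first-call-only wrapping, with an invented FakeClient and a create_cluster stand-in:

from unittest import mock


class FakeClient:
    """Hypothetical client that wraps each RPC once and then reuses it."""

    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._wrapped_methods = {}

    def _wrapped(self, name, rpc):
        if name not in self._wrapped_methods:
            # Retry/timeout wrapping happens only the first time the RPC is used.
            self._wrapped_methods[name] = self._wrapper_fn(rpc)
        return self._wrapped_methods[name]

    def create_cluster(self, request):
        return self._wrapped("create_cluster", lambda req: "operation")(request)


wrapper_fn = mock.Mock(side_effect=lambda rpc: rpc)
client = FakeClient(wrapper_fn)

client.create_cluster({})
assert wrapper_fn.call_count == 1   # wrapper built on the first call

wrapper_fn.reset_mock()
client.create_cluster({})
assert wrapper_fn.call_count == 0   # cached wrapper reused, no rebuild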
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_cluster(request) @@ -3063,26 +3072,28 @@ async def test_upgrade_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_cluster - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3394,8 +3405,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3449,26 +3461,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3827,22 +3841,23 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_access_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_access_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_access_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4220,22 +4235,23 @@ async def test_generate_offline_credential_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_offline_credential - ] = mock_object + ] = mock_rpc request = {} await client.generate_offline_credential(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_offline_credential(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4608,22 +4624,23 @@ async def test_list_node_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_node_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_node_pools(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_node_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5180,22 +5197,23 @@ async def test_get_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5498,8 +5516,9 @@ def test_create_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_node_pool(request) @@ -5553,26 +5572,28 @@ async def test_create_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5884,8 +5905,9 @@ def test_update_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_node_pool(request) @@ -5939,26 +5961,28 @@ async def test_update_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6262,8 +6286,9 @@ def test_delete_node_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_node_pool(request) @@ -6317,26 +6342,28 @@ async def test_delete_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6691,22 +6718,23 @@ async def test_list_machines_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_machines - ] = mock_object + ] = mock_rpc request = {} await client.list_machines(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_machines(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7263,22 +7291,23 @@ async def test_get_machine_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_machine - ] = mock_object + ] = mock_rpc request = {} await client.get_machine(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_machine(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7652,22 +7681,23 @@ async def test_list_vpn_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vpn_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_vpn_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vpn_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8260,22 +8290,23 @@ async def test_get_vpn_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vpn_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_vpn_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vpn_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8605,8 +8636,9 @@ def test_create_vpn_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_vpn_connection(request) @@ -8662,26 +8694,28 @@ async def test_create_vpn_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_vpn_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_vpn_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_vpn_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9014,8 +9048,9 @@ def test_delete_vpn_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_vpn_connection(request) @@ -9071,26 +9106,28 @@ async def test_delete_vpn_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_vpn_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_vpn_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_vpn_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9456,22 +9493,23 @@ async def test_get_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_server_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 6909c55470d8d9019584921ac1fa5ca97f0cb209 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 11:01:10 -0400 Subject: [PATCH 019/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#12995) - [ ] Regenerate this pull request now. 
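Note for reviewers skimming this regeneration: most of the test churn in this series renames `mock_object` to `mock_rpc`, gives the async mock a concrete `return_value`, and clarifies the comment about the cached `operations_client` wrapper; the behavior under test is unchanged. The following is a minimal, self-contained sketch of that cached-wrapped-RPC pattern, using a hypothetical `FakeTransport` stand-in; it is illustrative only and not part of the generated clients.

```python
# Hypothetical sketch (not generated code): the cached-wrapped-RPC pattern
# these regenerated tests assert on. The first call wraps and caches the RPC;
# later calls reuse the cached wrapper instead of rebuilding it.
from unittest import mock


class FakeTransport:
    """Stands in for a GAPIC transport with a _wrapped_methods cache."""

    def __init__(self):
        self._wrapped_methods = {}

    def wrapped(self, rpc):
        # Wrap once, then serve every later call from the cache.
        if rpc not in self._wrapped_methods:
            self._wrapped_methods[rpc] = mock.Mock(wraps=rpc)
        return self._wrapped_methods[rpc]


def test_use_cached_wrapped_rpc():
    transport = FakeTransport()
    raw_rpc = mock.Mock(return_value="response")

    # Mirror the generated tests: replace the cached wrapped function with a
    # mock ("mock_rpc") whose return value is itself a Mock.
    mock_rpc = mock.Mock(return_value=mock.Mock())
    transport._wrapped_methods[raw_rpc] = mock_rpc

    transport.wrapped(raw_rpc)()
    assert mock_rpc.call_count == 1

    # The second call must reuse the cached wrapper, not build a new one.
    transport.wrapped(raw_rpc)()
    assert mock_rpc.call_count == 2
```

In the operation-method tests below, the additional `wrapper_fn.reset_mock()` / `assert wrapper_fn.call_count == 0` checks verify the same reuse property for the cached operations-client wrapper built on the first RPC call.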
PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFkcy1hZG1hbmFnZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFuYWx5dGljcy1hZG1pbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFuYWx5dGljcy1kYXRhLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2FyZC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2hhdC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtZXZlbnRzLXN1YnNjcmlwdGlvbnMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtbWVldC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtc2NyaXB0LXR5cGUvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFyZWExMjAtdGFibGVzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFjY2Vzcy1hcHByb3ZhbC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFkdmlzb3J5bm90aWZpY2F0aW9ucy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFsbG95ZGItY29ubmVjdG9ycy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFsbG95ZGIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaS1nYXRld2F5Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaS1rZXlzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaWdlZS1jb25uZWN0Ly5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaWdlZS1yZWdpc3RyeS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaWh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwcGVuZ2luZS1hZG1pbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/ads/admanager/gapic_version.py | 2 +- .../google/ads/admanager_v1/gapic_version.py | 2 +- .../services/ad_partner_service/client.py | 2 +- .../services/ad_unit_service/client.py | 2 +- .../services/company_service/client.py | 2 +- .../services/contact_service/client.py | 2 +- 
.../services/creative_service/client.py | 2 +- .../services/custom_field_service/client.py | 2 +- .../custom_targeting_key_service/client.py | 2 +- .../custom_targeting_value_service/client.py | 2 +- .../services/label_service/client.py | 2 +- .../services/line_item_service/client.py | 2 +- .../services/network_service/client.py | 2 +- .../services/order_service/client.py | 2 +- .../services/placement_service/client.py | 2 +- .../services/report_service/client.py | 2 +- .../services/role_service/client.py | 2 +- .../services/team_service/client.py | 2 +- .../services/user_service/client.py | 2 +- ...ppet_metadata_google.ads.admanager.v1.json | 2 +- .../google/analytics/admin/gapic_version.py | 2 +- .../analytics/admin_v1alpha/gapic_version.py | 2 +- .../analytics_admin_service/async_client.py | 8 +- .../analytics_admin_service/client.py | 4 +- .../admin_v1alpha/types/analytics_admin.py | 2 +- .../analytics/admin_v1beta/gapic_version.py | 2 +- .../analytics_admin_service/async_client.py | 6 +- .../analytics_admin_service/client.py | 2 +- ...etadata_google.analytics.admin.v1beta.json | 2 +- .../test_analytics_admin_service.py | 1350 +++++++++-------- .../test_analytics_admin_service.py | 486 +++--- .../google/analytics/data/gapic_version.py | 2 +- .../analytics/data_v1alpha/gapic_version.py | 2 +- .../alpha_analytics_data/async_client.py | 6 +- .../services/alpha_analytics_data/client.py | 2 +- .../analytics/data_v1beta/gapic_version.py | 2 +- .../beta_analytics_data/async_client.py | 5 +- .../services/beta_analytics_data/client.py | 2 +- ...etadata_google.analytics.data.v1alpha.json | 2 +- ...metadata_google.analytics.data.v1beta.json | 2 +- .../data_v1alpha/test_alpha_analytics_data.py | 137 +- .../data_v1beta/test_beta_analytics_data.py | 109 +- .../google/apps/card/gapic_version.py | 2 +- .../google/apps/card_v1/gapic_version.py | 2 +- .../google/apps/chat/gapic_version.py | 2 +- .../google/apps/chat_v1/gapic_version.py | 2 +- .../services/chat_service/async_client.py | 5 +- .../chat_v1/services/chat_service/client.py | 2 +- .../snippet_metadata_google.chat.v1.json | 2 +- .../unit/gapic/chat_v1/test_chat_service.py | 252 +-- .../events_subscriptions/gapic_version.py | 2 +- .../events_subscriptions_v1/gapic_version.py | 2 +- .../subscriptions_service/async_client.py | 6 +- .../services/subscriptions_service/client.py | 2 +- ...a_google.apps.events.subscriptions.v1.json | 2 +- .../test_subscriptions_service.py | 94 +- .../google/apps/meet/gapic_version.py | 2 +- .../google/apps/meet_v2/gapic_version.py | 2 +- .../async_client.py | 6 +- .../conference_records_service/client.py | 2 +- .../services/spaces_service/async_client.py | 5 +- .../meet_v2/services/spaces_service/client.py | 2 +- .../google/apps/meet_v2beta/gapic_version.py | 2 +- .../async_client.py | 6 +- .../conference_records_service/client.py | 2 +- .../services/spaces_service/async_client.py | 5 +- .../services/spaces_service/client.py | 2 +- .../snippet_metadata_google.apps.meet.v2.json | 2 +- ...ppet_metadata_google.apps.meet.v2beta.json | 2 +- .../test_conference_records_service.py | 108 +- .../unit/gapic/meet_v2/test_spaces_service.py | 36 +- .../test_conference_records_service.py | 108 +- .../gapic/meet_v2beta/test_spaces_service.py | 36 +- .../script/type/calendar/gapic_version.py | 2 +- .../apps/script/type/docs/gapic_version.py | 2 +- .../apps/script/type/drive/gapic_version.py | 2 +- .../google/apps/script/type/gapic_version.py | 2 +- .../apps/script/type/gmail/gapic_version.py | 2 +- 
.../apps/script/type/sheets/gapic_version.py | 2 +- .../apps/script/type/slides/gapic_version.py | 2 +- .../google/area120/tables/gapic_version.py | 2 +- .../area120/tables_v1alpha1/gapic_version.py | 2 +- .../services/tables_service/async_client.py | 5 +- .../services/tables_service/client.py | 2 +- ...tadata_google.area120.tables.v1alpha1.json | 2 +- .../tables_v1alpha1/test_tables_service.py | 108 +- .../cloud/accessapproval/gapic_version.py | 2 +- .../cloud/accessapproval_v1/gapic_version.py | 2 +- .../services/access_approval/async_client.py | 5 +- .../services/access_approval/client.py | 2 +- ...tadata_google.cloud.accessapproval.v1.json | 2 +- .../accessapproval_v1/test_access_approval.py | 81 +- .../advisorynotifications/gapic_version.py | 2 +- .../advisorynotifications_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../advisory_notifications_service/client.py | 2 +- ...google.cloud.advisorynotifications.v1.json | 2 +- .../test_advisory_notifications_service.py | 36 +- .../cloud/alloydb/connectors/gapic_version.py | 2 +- .../alloydb/connectors_v1/gapic_version.py | 2 +- .../google/cloud/alloydb/gapic_version.py | 2 +- .../google/cloud/alloydb_v1/gapic_version.py | 2 +- .../services/alloy_db_admin/async_client.py | 5 +- .../services/alloy_db_admin/client.py | 2 +- .../cloud/alloydb_v1/types/resources.py | 2 +- .../cloud/alloydb_v1alpha/gapic_version.py | 2 +- .../services/alloy_db_admin/async_client.py | 5 +- .../services/alloy_db_admin/client.py | 2 +- .../cloud/alloydb_v1alpha/types/resources.py | 2 +- .../cloud/alloydb_v1beta/gapic_version.py | 2 +- .../services/alloy_db_admin/async_client.py | 5 +- .../services/alloy_db_admin/client.py | 2 +- .../cloud/alloydb_v1beta/types/resources.py | 2 +- ...ppet_metadata_google.cloud.alloydb.v1.json | 2 +- ...metadata_google.cloud.alloydb.v1alpha.json | 2 +- ..._metadata_google.cloud.alloydb.v1beta.json | 2 +- .../gapic/alloydb_v1/test_alloy_db_admin.py | 449 +++--- .../alloydb_v1alpha/test_alloy_db_admin.py | 458 +++--- .../alloydb_v1beta/test_alloy_db_admin.py | 458 +++--- .../google/cloud/apigateway/gapic_version.py | 2 +- .../cloud/apigateway_v1/gapic_version.py | 2 +- .../api_gateway_service/async_client.py | 5 +- .../services/api_gateway_service/client.py | 2 +- ...t_metadata_google.cloud.apigateway.v1.json | 2 +- .../apigateway_v1/test_api_gateway_service.py | 225 +-- .../google/cloud/api_keys/gapic_version.py | 2 +- .../google/cloud/api_keys_v2/gapic_version.py | 2 +- .../services/api_keys/async_client.py | 5 +- .../api_keys_v2/services/api_keys/client.py | 2 +- ...nippet_metadata_google.api.apikeys.v2.json | 2 +- .../unit/gapic/api_keys_v2/test_api_keys.py | 112 +- .../cloud/apigeeconnect/gapic_version.py | 2 +- .../cloud/apigeeconnect_v1/gapic_version.py | 2 +- .../connection_service/async_client.py | 5 +- .../services/connection_service/client.py | 2 +- .../services/tether/async_client.py | 5 +- .../services/tether/client.py | 2 +- ...etadata_google.cloud.apigeeconnect.v1.json | 2 +- .../test_connection_service.py | 9 +- .../gapic/apigeeconnect_v1/test_tether.py | 9 +- .../cloud/apigee_registry/gapic_version.py | 2 +- .../cloud/apigee_registry_v1/gapic_version.py | 2 +- .../services/provisioning/async_client.py | 5 +- .../services/provisioning/client.py | 2 +- .../services/registry/async_client.py | 5 +- .../services/registry/client.py | 2 +- ...tadata_google.cloud.apigeeregistry.v1.json | 2 +- .../apigee_registry_v1/test_provisioning.py | 47 +- .../gapic/apigee_registry_v1/test_registry.py | 315 ++-- 
packages/google-cloud-apihub/.flake8 | 2 +- packages/google-cloud-apihub/MANIFEST.in | 2 +- packages/google-cloud-apihub/docs/conf.py | 2 +- .../google/cloud/apihub/gapic_version.py | 2 +- .../google/cloud/apihub_v1/gapic_version.py | 2 +- .../services/api_hub/async_client.py | 5 +- .../apihub_v1/services/api_hub/client.py | 2 +- .../api_hub_dependencies/async_client.py | 6 +- .../services/api_hub_dependencies/client.py | 2 +- .../services/api_hub_plugin/async_client.py | 5 +- .../services/api_hub_plugin/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../services/linting_service/async_client.py | 5 +- .../services/linting_service/client.py | 2 +- .../services/provisioning/async_client.py | 5 +- .../apihub_v1/services/provisioning/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- packages/google-cloud-apihub/noxfile.py | 2 +- .../scripts/decrypt-secrets.sh | 2 +- .../unit/gapic/apihub_v1/test_api_hub.py | 315 ++-- .../apihub_v1/test_api_hub_dependencies.py | 45 +- .../gapic/apihub_v1/test_api_hub_plugin.py | 27 +- .../test_host_project_registration_service.py | 27 +- .../gapic/apihub_v1/test_linting_service.py | 36 +- .../unit/gapic/apihub_v1/test_provisioning.py | 37 +- ...test_runtime_project_attachment_service.py | 45 +- .../cloud/appengine_admin/gapic_version.py | 2 +- .../cloud/appengine_admin_v1/gapic_version.py | 2 +- .../services/applications/async_client.py | 5 +- .../services/applications/client.py | 2 +- .../authorized_certificates/async_client.py | 6 +- .../authorized_certificates/client.py | 2 +- .../authorized_domains/async_client.py | 5 +- .../services/authorized_domains/client.py | 2 +- .../services/domain_mappings/async_client.py | 5 +- .../services/domain_mappings/client.py | 2 +- .../services/firewall/async_client.py | 5 +- .../services/firewall/client.py | 2 +- .../services/instances/async_client.py | 5 +- .../services/instances/client.py | 2 +- .../services/services/async_client.py | 5 +- .../services/services/client.py | 2 +- .../services/versions/async_client.py | 5 +- .../services/versions/client.py | 2 +- .../snippet_metadata_google.appengine.v1.json | 2 +- .../appengine_admin_v1/test_applications.py | 66 +- .../test_authorized_certificates.py | 45 +- .../test_authorized_domains.py | 9 +- .../test_domain_mappings.py | 75 +- .../gapic/appengine_admin_v1/test_firewall.py | 54 +- .../appengine_admin_v1/test_instances.py | 56 +- .../gapic/appengine_admin_v1/test_services.py | 56 +- .../gapic/appengine_admin_v1/test_versions.py | 75 +- 204 files changed, 3536 insertions(+), 2915 deletions(-) diff --git a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py index 0b5e0860c1e1..f3907ddcd88e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[AdPartnerServiceTransport], Callable[..., AdPartnerServiceTransport], ] = ( - type(self).get_transport_class(transport) + AdPartnerServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AdPartnerServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py index e92cfbf83525..f1200c52e05a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[AdUnitServiceTransport], Callable[..., AdUnitServiceTransport] ] = ( - type(self).get_transport_class(transport) + AdUnitServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AdUnitServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py index dfffec532f1b..a1bc3d1c6eab 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py @@ -728,7 +728,7 @@ def __init__( transport_init: Union[ Type[CompanyServiceTransport], Callable[..., CompanyServiceTransport] ] = ( - type(self).get_transport_class(transport) + CompanyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompanyServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py index 204e8293bbbc..46d892852e64 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py @@ -670,7 +670,7 @@ def __init__( transport_init: Union[ Type[ContactServiceTransport], Callable[..., ContactServiceTransport] ] = ( - 
type(self).get_transport_class(transport) + ContactServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContactServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py index 2fabc0254aea..c04fdc539730 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py @@ -711,7 +711,7 @@ def __init__( transport_init: Union[ Type[CreativeServiceTransport], Callable[..., CreativeServiceTransport] ] = ( - type(self).get_transport_class(transport) + CreativeServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CreativeServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py index 37de0b69d4f3..4b1cd58b89f4 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py @@ -675,7 +675,7 @@ def __init__( Type[CustomFieldServiceTransport], Callable[..., CustomFieldServiceTransport], ] = ( - type(self).get_transport_class(transport) + CustomFieldServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CustomFieldServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py index f2e5a41459cf..53ee2f5439d2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py @@ -685,7 +685,7 @@ def __init__( Type[CustomTargetingKeyServiceTransport], Callable[..., CustomTargetingKeyServiceTransport], ] = ( - type(self).get_transport_class(transport) + CustomTargetingKeyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CustomTargetingKeyServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py index f05268ef0763..2a1a0435b1c9 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py @@ -694,7 +694,7 @@ def __init__( Type[CustomTargetingValueServiceTransport], Callable[..., CustomTargetingValueServiceTransport], ] = ( - type(self).get_transport_class(transport) + CustomTargetingValueServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CustomTargetingValueServiceTransport], transport diff --git 
a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py index 343ef514f440..ee9d7253b492 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[LabelServiceTransport], Callable[..., LabelServiceTransport] ] = ( - type(self).get_transport_class(transport) + LabelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LabelServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py index e395cd391a54..bcd2fdb6f8ea 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py @@ -703,7 +703,7 @@ def __init__( transport_init: Union[ Type[LineItemServiceTransport], Callable[..., LineItemServiceTransport] ] = ( - type(self).get_transport_class(transport) + LineItemServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LineItemServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py index 708dd42e3e90..6c6cc693983d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py @@ -667,7 +667,7 @@ def __init__( transport_init: Union[ Type[NetworkServiceTransport], Callable[..., NetworkServiceTransport] ] = ( - type(self).get_transport_class(transport) + NetworkServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py index 09ad3e675c14..98c15bafc0f8 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py @@ -753,7 +753,7 @@ def __init__( transport_init: Union[ Type[OrderServiceTransport], Callable[..., OrderServiceTransport] ] = ( - type(self).get_transport_class(transport) + OrderServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OrderServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py index 575189aaea65..313b200e4c3d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py @@ -691,7 +691,7 @@ def __init__( Type[PlacementServiceTransport], Callable[..., 
PlacementServiceTransport], ] = ( - type(self).get_transport_class(transport) + PlacementServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PlacementServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py index df84cf7c5295..90e0b153f8af 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py @@ -652,7 +652,7 @@ def __init__( transport_init: Union[ Type[ReportServiceTransport], Callable[..., ReportServiceTransport] ] = ( - type(self).get_transport_class(transport) + ReportServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReportServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py index db9528e1a581..bae3794b29dd 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[RoleServiceTransport], Callable[..., RoleServiceTransport] ] = ( - type(self).get_transport_class(transport) + RoleServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RoleServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/client.py index e6ebd9ddb51b..c054ad9fdb85 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[TeamServiceTransport], Callable[..., TeamServiceTransport] ] = ( - type(self).get_transport_class(transport) + TeamServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TeamServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py index 0b74b925ce06..9e45779e3a82 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py @@ -680,7 +680,7 @@ def __init__( transport_init: Union[ Type[UserServiceTransport], Callable[..., UserServiceTransport] ] = ( - type(self).get_transport_class(transport) + UserServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserServiceTransport], transport) ) diff --git a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json index 5f13252c91c9..5910a1b6bf99 100644 --- 
a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json +++ b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ads-admanager", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py index 4ec679931038..558c8aab67c5 100644 --- a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.23.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py index 4ec679931038..558c8aab67c5 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.23.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py index 058dc07b13a2..ef2d97ef9155 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -380,10 +379,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsAdminServiceClient).get_transport_class, - type(AnalyticsAdminServiceClient), - ) + get_transport_class = AnalyticsAdminServiceClient.get_transport_class def __init__( self, @@ -12122,7 +12118,7 @@ async def create_calculated_metric( metric's resource name. This value should be 1-80 characters and valid - characters are `[a-zA-Z0-9_]`, no spaces allowed. + characters are /[a-zA-Z0-9_]/, no spaces allowed. calculated_metric_id must be unique between all calculated metrics under a property. 
The calculated_metric_id is used when referencing this diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py index 58798a150034..dc955b205d26 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py @@ -1304,7 +1304,7 @@ def __init__( Type[AnalyticsAdminServiceTransport], Callable[..., AnalyticsAdminServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsAdminServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsAdminServiceTransport], transport) ) @@ -12694,7 +12694,7 @@ def create_calculated_metric( metric's resource name. This value should be 1-80 characters and valid - characters are `[a-zA-Z0-9_]`, no spaces allowed. + characters are /[a-zA-Z0-9_]/, no spaces allowed. calculated_metric_id must be unique between all calculated metrics under a property. The calculated_metric_id is used when referencing this diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py index 97fb01fe2add..6178315944ed 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py @@ -2650,7 +2650,7 @@ class CreateCalculatedMetricRequest(proto.Message): resource name. This value should be 1-80 characters and valid characters - are `[a-zA-Z0-9_]`, no spaces allowed. calculated_metric_id + are /[a-zA-Z0-9_]/, no spaces allowed. calculated_metric_id must be unique between all calculated metrics under a property. The calculated_metric_id is used when referencing this calculated metric from external APIs, for example, diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py index 4ec679931038..558c8aab67c5 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.23.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py index 7a6f3f1afc40..a86919c6511d 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -262,10 +261,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsAdminServiceClient).get_transport_class, - type(AnalyticsAdminServiceClient), - ) + get_transport_class = AnalyticsAdminServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py index 89723720b9af..baf4b1ed050c 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py @@ -901,7 +901,7 @@ def __init__( Type[AnalyticsAdminServiceTransport], Callable[..., AnalyticsAdminServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsAdminServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsAdminServiceTransport], transport) ) diff --git a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json index 80d14403827c..5b91879363a3 100644 --- a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json +++ b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-admin", - "version": "0.23.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py index c117f372bc0b..24e6ceb09267 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py @@ -1386,22 +1386,23 @@ async def test_get_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_account - ] = mock_object + ] = mock_rpc request = {} await client.get_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1754,22 +1755,23 @@ async def test_list_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_accounts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2159,22 +2161,23 @@ async def test_delete_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_account - ] = mock_object + ] = mock_rpc request = {} await client.delete_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2522,22 +2525,23 @@ async def test_update_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_account - ] = mock_object + ] = mock_rpc request = {} await client.update_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2913,22 +2917,23 @@ async def test_provision_account_ticket_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_account_ticket - ] = mock_object + ] = mock_rpc request = {} await client.provision_account_ticket(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.provision_account_ticket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3150,22 +3155,23 @@ async def test_list_account_summaries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_account_summaries - ] = mock_object + ] = mock_rpc request = {} await client.list_account_summaries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_account_summaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3597,22 +3603,23 @@ async def test_get_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_property - ] = mock_object + ] = mock_rpc request = {} await client.get_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3975,22 +3982,23 @@ async def test_list_properties_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_properties - ] = mock_object + ] = mock_rpc request = {} await client.list_properties(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_properties(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4407,22 +4415,23 @@ async def test_create_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_property - ] = mock_object + ] = mock_rpc request = {} await client.create_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4748,22 +4757,23 @@ async def test_delete_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_property - ] = mock_object + ] = mock_rpc request = {} await client.delete_property(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5144,22 +5154,23 @@ async def test_update_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_property - ] = mock_object + ] = mock_rpc request = {} await client.update_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5545,22 +5556,23 @@ async def test_create_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.create_firebase_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5937,22 +5949,23 @@ async def test_delete_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_firebase_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6317,22 +6330,23 @@ async def test_list_firebase_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_firebase_links - ] = mock_object + ] = mock_rpc request = {} await client.list_firebase_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_firebase_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6909,22 +6923,23 @@ async def test_get_global_site_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_global_site_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_global_site_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_global_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7308,22 +7323,23 @@ async def test_create_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7717,22 +7733,23 @@ async def test_update_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8114,22 +8131,23 @@ async def test_delete_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8495,22 +8513,23 @@ async def test_list_google_ads_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_google_ads_links - ] = mock_object + ] = mock_rpc request = {} await client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_google_ads_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9100,22 +9119,23 @@ async def test_get_data_sharing_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_sharing_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_sharing_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9504,22 +9524,23 @@ async def test_get_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9898,22 +9919,23 @@ async def test_list_measurement_protocol_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_measurement_protocol_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_measurement_protocol_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10498,22 +10520,23 @@ async def test_create_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10901,22 +10924,23 @@ async def test_delete_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11282,22 +11306,23 @@ async def test_update_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11689,22 +11714,23 @@ async def test_acknowledge_user_data_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge_user_data_collection - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge_user_data_collection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge_user_data_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11993,22 +12019,23 @@ async def test_get_sk_ad_network_conversion_value_schema_async_use_cached_wrappe ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12392,22 +12419,23 @@ async def test_create_sk_ad_network_conversion_value_schema_async_use_cached_wra ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12799,22 +12827,23 @@ async def test_delete_sk_ad_network_conversion_value_schema_async_use_cached_wra ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13185,22 +13214,23 @@ async def test_update_sk_ad_network_conversion_value_schema_async_use_cached_wra ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13599,22 +13629,23 @@ async def test_list_sk_ad_network_conversion_value_schemas_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sk_ad_network_conversion_value_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_sk_ad_network_conversion_value_schemas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sk_ad_network_conversion_value_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14211,22 +14242,23 @@ async def test_search_change_history_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_change_history_events - ] = mock_object + ] = mock_rpc request = {} await client.search_change_history_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_change_history_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14726,22 +14758,23 @@ async def test_get_google_signals_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_google_signals_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_google_signals_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15126,22 +15159,23 @@ async def test_update_google_signals_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_google_signals_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_signals_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15546,22 +15580,23 @@ async def test_create_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15966,22 +16001,23 @@ async def test_update_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.update_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16389,22 +16425,23 @@ async def test_get_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.get_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16781,22 +16818,23 @@ async def test_delete_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17162,22 +17200,23 @@ async def test_list_conversion_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversion_events - ] = mock_object + ] = mock_rpc request = {} await client.list_conversion_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversion_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17753,22 +17792,23 @@ async def test_create_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key_event - ] = mock_object + ] = mock_rpc request = {} await client.create_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18141,22 +18181,23 @@ async def test_update_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_key_event - ] = mock_object + ] = mock_rpc request = {} await client.update_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18531,22 +18572,23 @@ async def test_get_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key_event - ] = mock_object + ] = mock_rpc request = {} await client.get_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18894,22 +18936,23 @@ async def test_delete_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_key_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19251,22 +19294,23 @@ async def test_list_key_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_key_events - ] = mock_object + ] = mock_rpc request = {} await client.list_key_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_key_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19828,22 +19872,23 @@ async def test_get_display_video360_advertiser_link_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_display_video360_advertiser_link - ] = mock_object + ] = mock_rpc request = {} await client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20222,22 +20267,23 @@ async def test_list_display_video360_advertiser_links_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_display_video360_advertiser_links - ] = mock_object + ] = mock_rpc request = {} await client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_display_video360_advertiser_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20824,22 +20870,23 @@ async def test_create_display_video360_advertiser_link_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_display_video360_advertiser_link - ] = mock_object + ] = mock_rpc request = {} await client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21227,22 +21274,23 @@ async def test_delete_display_video360_advertiser_link_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_display_video360_advertiser_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21608,22 +21656,23 @@ async def test_update_display_video360_advertiser_link_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_display_video360_advertiser_link - ] = mock_object + ] = mock_rpc request = {} await client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22035,22 +22084,23 @@ async def test_get_display_video360_advertiser_link_proposal_async_use_cached_wr ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_display_video360_advertiser_link_proposal - ] = mock_object + ] = mock_rpc request = {} await client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22447,22 +22497,23 @@ async def test_list_display_video360_advertiser_link_proposals_async_use_cached_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_display_video360_advertiser_link_proposals - ] = mock_object + ] = mock_rpc request = {} await client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_display_video360_advertiser_link_proposals(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23089,22 +23140,23 @@ async def test_create_display_video360_advertiser_link_proposal_async_use_cached ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_display_video360_advertiser_link_proposal - ] = mock_object + ] = mock_rpc request = {} await client.create_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23515,22 +23567,23 @@ async def test_delete_display_video360_advertiser_link_proposal_async_use_cached ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_display_video360_advertiser_link_proposal - ] = mock_object + ] = mock_rpc request = {} await client.delete_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23914,22 +23967,23 @@ async def test_approve_display_video360_advertiser_link_proposal_async_use_cache ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_display_video360_advertiser_link_proposal - ] = mock_object + ] = mock_rpc request = {} await client.approve_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.approve_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24247,22 +24301,23 @@ async def test_cancel_display_video360_advertiser_link_proposal_async_use_cached ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_display_video360_advertiser_link_proposal - ] = mock_object + ] = mock_rpc request = {} await client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24575,22 +24630,23 @@ async def test_create_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24994,22 +25050,23 @@ async def test_update_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25404,22 +25461,23 @@ async def test_list_custom_dimensions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_dimensions - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_dimensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25987,22 +26045,23 @@ async def test_archive_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.archive_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.archive_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -26380,22 +26439,23 @@ async def test_get_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -26801,22 +26861,23 @@ async def test_create_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27234,22 +27295,23 @@ async def test_update_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27649,22 +27711,23 @@ async def test_list_custom_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_metrics - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -28232,22 +28295,23 @@ async def test_archive_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.archive_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.archive_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -28632,22 +28696,23 @@ async def test_get_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -29044,22 +29109,23 @@ async def test_get_data_retention_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_retention_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -29444,22 +29510,23 @@ async def test_update_data_retention_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_retention_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -29854,22 +29921,23 @@ async def test_create_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -30272,22 +30340,23 @@ async def test_delete_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -30652,22 +30721,23 @@ async def test_update_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -31077,22 +31147,23 @@ async def test_list_data_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -31659,22 +31730,23 @@ async def test_get_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -32045,22 +32117,23 @@ async def test_get_audience_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_audience - ] = mock_object + ] = mock_rpc request = {} await client.get_audience(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -32420,22 +32493,23 @@ async def test_list_audiences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_audiences - ] = mock_object + ] = mock_rpc request = {} await client.list_audiences(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_audiences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -32996,22 +33070,23 @@ async def test_create_audience_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_audience - ] = mock_object + ] = mock_rpc request = {} await client.create_audience(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -33397,22 +33472,23 @@ async def test_update_audience_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_audience - ] = mock_object + ] = mock_rpc request = {} await client.update_audience(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -33779,22 +33855,23 @@ async def test_archive_audience_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.archive_audience - ] = mock_object + ] = mock_rpc request = {} await client.archive_audience(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.archive_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -34073,22 +34150,23 @@ async def test_get_search_ads360_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_search_ads360_link - ] = mock_object + ] = mock_rpc request = {} await client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -34467,22 +34545,23 @@ async def test_list_search_ads360_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_search_ads360_links - ] = mock_object + ] = mock_rpc request = {} await client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_search_ads360_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -35065,22 +35144,23 @@ async def test_create_search_ads360_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_search_ads360_link - ] = mock_object + ] = mock_rpc request = {} await client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -35460,22 +35540,23 @@ async def test_delete_search_ads360_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_search_ads360_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -35841,22 +35922,23 @@ async def test_update_search_ads360_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_search_ads360_link - ] = mock_object + ] = mock_rpc request = {} await client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -36267,22 +36349,23 @@ async def test_get_attribution_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attribution_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -36695,22 +36778,23 @@ async def test_update_attribution_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attribution_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -37112,22 +37196,23 @@ async def test_run_access_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_access_report - ] = mock_object + ] = mock_rpc request = {} await client.run_access_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_access_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -37417,22 +37502,23 @@ async def test_create_access_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_access_binding - ] = mock_object + ] = mock_rpc request = {} await client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -37820,22 +37906,23 @@ async def test_get_access_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_access_binding - ] = mock_object + ] = mock_rpc request = {} await client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -38210,22 +38297,23 @@ async def test_update_access_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_access_binding - ] = mock_object + ] = mock_rpc request = {} await client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -38593,22 +38681,23 @@ async def test_delete_access_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_access_binding - ] = mock_object + ] = mock_rpc request = {} await client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -38973,22 +39062,23 @@ async def test_list_access_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_access_bindings - ] = mock_object + ] = mock_rpc request = {} await client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -39558,22 +39648,23 @@ async def test_batch_create_access_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_access_bindings - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -39852,22 +39943,23 @@ async def test_batch_get_access_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_get_access_bindings - ] = mock_object + ] = mock_rpc request = {} await client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -40146,22 +40238,23 @@ async def test_batch_update_access_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_access_bindings - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -40438,22 +40531,23 @@ async def test_batch_delete_access_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_access_bindings - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -40745,22 +40839,23 @@ async def test_get_expanded_data_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_expanded_data_set - ] = mock_object + ] = mock_rpc request = {} await client.get_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -41143,22 +41238,23 @@ async def test_list_expanded_data_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_expanded_data_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_expanded_data_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -41745,22 +41841,23 @@ async def test_create_expanded_data_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_expanded_data_set - ] = mock_object + ] = mock_rpc request = {} await client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -42159,22 +42256,23 @@ async def test_update_expanded_data_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_expanded_data_set - ] = mock_object + ] = mock_rpc request = {} await client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -42558,22 +42656,23 @@ async def test_delete_expanded_data_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_expanded_data_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -42946,22 +43045,23 @@ async def test_get_channel_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel_group - ] = mock_object + ] = mock_rpc request = {} await client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -43342,22 +43442,23 @@ async def test_list_channel_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channel_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channel_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -43943,22 +44044,23 @@ async def test_create_channel_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_group - ] = mock_object + ] = mock_rpc request = {} await client.create_channel_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -44356,22 +44458,23 @@ async def test_update_channel_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_channel_group - ] = mock_object + ] = mock_rpc request = {} await client.update_channel_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -44754,22 +44857,23 @@ async def test_delete_channel_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_channel_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -45130,22 +45234,23 @@ async def test_set_automated_ga4_configuration_opt_out_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_automated_ga4_configuration_opt_out - ] = mock_object + ] = mock_rpc request = {} await client.set_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -45372,22 +45477,23 @@ async def test_fetch_automated_ga4_configuration_opt_out_async_use_cached_wrappe ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_automated_ga4_configuration_opt_out - ] = mock_object + ] = mock_rpc request = {} await client.fetch_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -45635,22 +45741,23 @@ async def test_create_big_query_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_big_query_link - ] = mock_object + ] = mock_rpc request = {} await client.create_big_query_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -46072,22 +46179,23 @@ async def test_get_big_query_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_big_query_link - ] = mock_object + ] = mock_rpc request = {} await client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -46476,22 +46584,23 @@ async def test_list_big_query_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_big_query_links - ] = mock_object + ] = mock_rpc request = {} await client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_big_query_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -47059,22 +47168,23 @@ async def test_delete_big_query_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_big_query_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -47458,22 +47568,23 @@ async def test_update_big_query_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_big_query_link - ] = mock_object + ] = mock_rpc request = {} await client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -47902,22 +48013,23 @@ async def test_get_enhanced_measurement_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_enhanced_measurement_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -48336,22 +48448,23 @@ async def test_update_enhanced_measurement_settings_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_enhanced_measurement_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -48757,22 +48870,23 @@ async def test_create_connected_site_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connected_site_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_connected_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -48986,22 +49100,23 @@ async def test_delete_connected_site_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connected_site_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_connected_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -49213,22 +49328,23 @@ async def test_list_connected_site_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connected_site_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_connected_site_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connected_site_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -49447,22 +49563,23 @@ async def test_fetch_connected_ga4_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_connected_ga4_property - ] = mock_object + ] = mock_rpc request = {} await client.fetch_connected_ga4_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_connected_ga4_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -49684,22 +49801,23 @@ async def test_get_ad_sense_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ad_sense_link - ] = mock_object + ] = mock_rpc request = {} await client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -50075,22 +50193,23 @@ async def test_create_ad_sense_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ad_sense_link - ] = mock_object + ] = mock_rpc request = {} await client.create_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -50467,22 +50586,23 @@ async def test_delete_ad_sense_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_ad_sense_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -50847,22 +50967,23 @@ async def test_list_ad_sense_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_ad_sense_links - ] = mock_object + ] = mock_rpc request = {} await client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_ad_sense_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -51443,22 +51564,23 @@ async def test_get_event_create_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event_create_rule - ] = mock_object + ] = mock_rpc request = {} await client.get_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -51837,22 +51959,23 @@ async def test_list_event_create_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_event_create_rules - ] = mock_object + ] = mock_rpc request = {} await client.list_event_create_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_event_create_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -52437,22 +52560,23 @@ async def test_create_event_create_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_event_create_rule - ] = mock_object + ] = mock_rpc request = {} await client.create_event_create_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -52841,22 +52965,23 @@ async def test_update_event_create_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_event_create_rule - ] = mock_object + ] = mock_rpc request = {} await client.update_event_create_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -53236,22 +53361,23 @@ async def test_delete_event_create_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_event_create_rule - ] = mock_object + ] = mock_rpc request = {} await client.delete_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -53620,22 +53746,23 @@ async def test_get_event_edit_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event_edit_rule - ] = mock_object + ] = mock_rpc request = {} await client.get_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -54014,22 +54141,23 @@ async def test_list_event_edit_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_event_edit_rules - ] = mock_object + ] = mock_rpc request = {} await client.list_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_event_edit_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -54612,22 +54740,23 @@ async def test_create_event_edit_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_event_edit_rule - ] = mock_object + ] = mock_rpc request = {} await client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -55016,22 +55145,23 @@ async def test_update_event_edit_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_event_edit_rule - ] = mock_object + ] = mock_rpc request = {} await client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -55411,22 +55541,23 @@ async def test_delete_event_edit_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_event_edit_rule - ] = mock_object + ] = mock_rpc request = {} await client.delete_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -55783,22 +55914,23 @@ async def test_reorder_event_edit_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reorder_event_edit_rules - ] = mock_object + ] = mock_rpc request = {} await client.reorder_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reorder_event_edit_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -56083,22 +56215,23 @@ async def test_update_data_redaction_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_redaction_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_redaction_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -56496,22 +56629,23 @@ async def test_get_data_redaction_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_redaction_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_redaction_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -56917,22 +57051,23 @@ async def test_get_calculated_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_calculated_metric - ] = mock_object + ] = mock_rpc request = {} await client.get_calculated_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_calculated_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -57352,22 +57487,23 @@ async def test_create_calculated_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_calculated_metric - ] = mock_object + ] = mock_rpc request = {} await client.create_calculated_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_calculated_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -57780,22 +57916,23 @@ async def test_list_calculated_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_calculated_metrics - ] = mock_object + ] = mock_rpc request = {} await client.list_calculated_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_calculated_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -58393,22 +58530,23 @@ async def test_update_calculated_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_calculated_metric - ] = mock_object + ] = mock_rpc request = {} await client.update_calculated_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_calculated_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -58802,22 +58940,23 @@ async def test_delete_calculated_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_calculated_metric - ] = mock_object + ] = mock_rpc request = {} await client.delete_calculated_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_calculated_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -59172,22 +59311,23 @@ async def test_create_rollup_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_rollup_property - ] = mock_object + ] = mock_rpc request = {} await client.create_rollup_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_rollup_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -59409,22 +59549,23 @@ async def test_get_rollup_property_source_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_rollup_property_source_link - ] = mock_object + ] = mock_rpc request = {} await client.get_rollup_property_source_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_rollup_property_source_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -59801,22 +59942,23 @@ async def test_list_rollup_property_source_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_rollup_property_source_links - ] = mock_object + ] = mock_rpc request = {} await client.list_rollup_property_source_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_rollup_property_source_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -60396,22 +60538,23 @@ async def test_create_rollup_property_source_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_rollup_property_source_link - ] = mock_object + ] = mock_rpc request = {} await client.create_rollup_property_source_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_rollup_property_source_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -60797,22 +60940,23 @@ async def test_delete_rollup_property_source_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_rollup_property_source_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_rollup_property_source_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_rollup_property_source_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -61167,22 +61311,23 @@ async def test_provision_subproperty_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_subproperty - ] = mock_object + ] = mock_rpc request = {} await client.provision_subproperty(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.provision_subproperty(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -61404,22 +61549,23 @@ async def test_create_subproperty_event_filter_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_subproperty_event_filter - ] = mock_object + ] = mock_rpc request = {} await client.create_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_subproperty_event_filter(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -61819,22 +61965,23 @@ async def test_get_subproperty_event_filter_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subproperty_event_filter - ] = mock_object + ] = mock_rpc request = {} await client.get_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subproperty_event_filter(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -62211,22 +62358,23 @@ async def test_list_subproperty_event_filters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subproperty_event_filters - ] = mock_object + ] = mock_rpc request = {} await client.list_subproperty_event_filters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subproperty_event_filters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -62808,22 +62956,23 @@ async def test_update_subproperty_event_filter_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_subproperty_event_filter - ] = mock_object + ] = mock_rpc request = {} await client.update_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_subproperty_event_filter(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -63213,22 +63362,23 @@ async def test_delete_subproperty_event_filter_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_subproperty_event_filter - ] = mock_object + ] = mock_rpc request = {} await client.delete_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_subproperty_event_filter(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py index 997c4f87bd9d..12a1f7b4cde1 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py @@ -1373,22 +1373,23 @@ async def test_get_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_account - ] = mock_object + ] = mock_rpc request = {} await client.get_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1741,22 +1742,23 @@ async def test_list_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_accounts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2146,22 +2148,23 @@ async def test_delete_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_account - ] = mock_object + ] = mock_rpc request = {} await client.delete_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2509,22 +2512,23 @@ async def test_update_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_account - ] = mock_object + ] = mock_rpc request = {} await client.update_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2900,22 +2904,23 @@ async def test_provision_account_ticket_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_account_ticket - ] = mock_object + ] = mock_rpc request = {} await client.provision_account_ticket(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.provision_account_ticket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,22 +3142,23 @@ async def test_list_account_summaries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_account_summaries - ] = mock_object + ] = mock_rpc request = {} await client.list_account_summaries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_account_summaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3584,22 +3590,23 @@ async def test_get_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_property - ] = mock_object + ] = mock_rpc request = {} await client.get_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3962,22 +3969,23 @@ async def test_list_properties_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_properties - ] = mock_object + ] = mock_rpc request = {} await client.list_properties(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_properties(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4394,22 +4402,23 @@ async def test_create_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_property - ] = mock_object + ] = mock_rpc request = {} await client.create_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4735,22 +4744,23 @@ async def test_delete_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_property - ] = mock_object + ] = mock_rpc request = {} await client.delete_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5131,22 +5141,23 @@ async def test_update_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_property - ] = mock_object + ] = mock_rpc request = {} await client.update_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5532,22 +5543,23 @@ async def test_create_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.create_firebase_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5924,22 +5936,23 @@ async def test_delete_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_firebase_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6304,22 +6317,23 @@ async def test_list_firebase_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_firebase_links - ] = mock_object + ] = mock_rpc request = {} await client.list_firebase_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_firebase_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6903,22 +6917,23 @@ async def test_create_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7312,22 +7327,23 @@ async def test_update_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7709,22 +7725,23 @@ async def test_delete_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8090,22 +8107,23 @@ async def test_list_google_ads_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_google_ads_links - ] = mock_object + ] = mock_rpc request = {} await client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_google_ads_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8695,22 +8713,23 @@ async def test_get_data_sharing_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_sharing_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_sharing_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9099,22 +9118,23 @@ async def test_get_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9493,22 +9513,23 @@ async def test_list_measurement_protocol_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_measurement_protocol_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_measurement_protocol_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10093,22 +10114,23 @@ async def test_create_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10496,22 +10518,23 @@ async def test_delete_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10877,22 +10900,23 @@ async def test_update_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11284,22 +11308,23 @@ async def test_acknowledge_user_data_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge_user_data_collection - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge_user_data_collection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge_user_data_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11587,22 +11612,23 @@ async def test_search_change_history_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_change_history_events - ] = mock_object + ] = mock_rpc request = {} await client.search_change_history_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_change_history_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12108,22 +12134,23 @@ async def test_create_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12528,22 +12555,23 @@ async def test_update_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.update_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12951,22 +12979,23 @@ async def test_get_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.get_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13343,22 +13372,23 @@ async def test_delete_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13724,22 +13754,23 @@ async def test_list_conversion_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversion_events - ] = mock_object + ] = mock_rpc request = {} await client.list_conversion_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversion_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14315,22 +14346,23 @@ async def test_create_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key_event - ] = mock_object + ] = mock_rpc request = {} await client.create_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14703,22 +14735,23 @@ async def test_update_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_key_event - ] = mock_object + ] = mock_rpc request = {} await client.update_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15093,22 +15126,23 @@ async def test_get_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key_event - ] = mock_object + ] = mock_rpc request = {} await client.get_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15456,22 +15490,23 @@ async def test_delete_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_key_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15813,22 +15848,23 @@ async def test_list_key_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_key_events - ] = mock_object + ] = mock_rpc request = {} await client.list_key_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_key_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16399,22 +16435,23 @@ async def test_create_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16818,22 +16855,23 @@ async def test_update_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17228,22 +17266,23 @@ async def test_list_custom_dimensions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_dimensions - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_dimensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17811,22 +17850,23 @@ async def test_archive_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.archive_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.archive_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18204,22 +18244,23 @@ async def test_get_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18625,22 +18666,23 @@ async def test_create_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19058,22 +19100,23 @@ async def test_update_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19473,22 +19516,23 @@ async def test_list_custom_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_metrics - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20056,22 +20100,23 @@ async def test_archive_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.archive_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.archive_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20456,22 +20501,23 @@ async def test_get_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20868,22 +20914,23 @@ async def test_get_data_retention_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_retention_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21268,22 +21315,23 @@ async def test_update_data_retention_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_retention_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21678,22 +21726,23 @@ async def test_create_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22096,22 +22145,23 @@ async def test_delete_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22476,22 +22526,23 @@ async def test_update_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22901,22 +22952,23 @@ async def test_list_data_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23483,22 +23535,23 @@ async def test_get_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23863,22 +23916,23 @@ async def test_run_access_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_access_report - ] = mock_object + ] = mock_rpc request = {} await client.run_access_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_access_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index 99ba85e1e055..558c8aab67c5 100644 --- a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index 99ba85e1e055..558c8aab67c5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
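The `gapic_version.py` hunks here and below reset the checked-in version to `0.0.0` while keeping the trailing `# {x-release-please-version}` marker, and the snippet-metadata JSON files are similarly reset to `0.1.0`. The marker presumably lets release tooling rewrite the number at release time, so the placeholder value itself carries no meaning. A hedged illustration of that substitution idea — a regex stand-in, not the actual release-please implementation, and the `0.18.12` value is invented for the example:

```python
import re

# Placeholder line as it appears in the regenerated gapic_version.py files.
line = '__version__ = "0.0.0"  # {x-release-please-version}'

# At release time, tooling keyed on the marker can swap in the real version.
released = re.sub(
    r'"[0-9]+\.[0-9]+\.[0-9]+"(\s*# \{x-release-please-version\})',
    r'"0.18.12"\1',  # hypothetical next version, not taken from the patch
    line,
)
print(released)  # __version__ = "0.18.12"  # {x-release-please-version}
```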
# -__version__ = "0.18.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index bf56a8b4708b..5afbe71746b5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlphaAnalyticsDataClient).get_transport_class, - type(AlphaAnalyticsDataClient), - ) + get_transport_class = AlphaAnalyticsDataClient.get_transport_class def __init__( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index ccebedecd1af..4a3bc827021a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -706,7 +706,7 @@ def __init__( Type[AlphaAnalyticsDataTransport], Callable[..., AlphaAnalyticsDataTransport], ] = ( - type(self).get_transport_class(transport) + AlphaAnalyticsDataClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlphaAnalyticsDataTransport], transport) ) diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index 99ba85e1e055..558c8aab67c5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py index 2a6f8bbb4aed..f3de83272ce7 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
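The async-client hunks above also drop `functools.partial(type(Client).get_transport_class, type(Client))` in favour of plain `Client.get_transport_class`, and the sync clients switch from `type(self).get_transport_class(transport)` to naming the class explicitly. A hypothetical mirror of the generated metaclass layout suggests why the two spellings resolve the same transport; `DemoClientMeta`, `DemoClient` and `DemoGrpcTransport` are illustrative names only, and the registry-on-the-metaclass arrangement is an assumption about the generated code, not quoted from it:

```python
from collections import OrderedDict
import functools


class DemoGrpcTransport:
    """Stand-in transport class."""


class DemoClientMeta(type):
    # Transport registry kept on the metaclass, as assumed for generated clients.
    _transport_registry = OrderedDict()
    _transport_registry["grpc"] = DemoGrpcTransport

    def get_transport_class(cls, label=None):
        # Reachable whether `cls` is the client class (new spelling) or the
        # metaclass itself (what the old functools.partial form bound as cls).
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class DemoClient(metaclass=DemoClientMeta):
    pass


old_style = functools.partial(
    type(DemoClient).get_transport_class, type(DemoClient)
)
new_style = DemoClient.get_transport_class

assert old_style("grpc") is new_style("grpc") is DemoGrpcTransport
```

The direct attribute access is shorter and binds the client class itself as `cls`, which is enough here because the registry lookup works either way.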
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BetaAnalyticsDataClient).get_transport_class, type(BetaAnalyticsDataClient) - ) + get_transport_class = BetaAnalyticsDataClient.get_transport_class def __init__( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py index bbf248164ecf..0e5f00491f32 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py @@ -683,7 +683,7 @@ def __init__( Type[BetaAnalyticsDataTransport], Callable[..., BetaAnalyticsDataTransport], ] = ( - type(self).get_transport_class(transport) + BetaAnalyticsDataClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BetaAnalyticsDataTransport], transport) ) diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index e2644091ccfd..619e88f4243b 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index eb19da08f5ca..29e86a085403 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 7f312b50b0ba..16d76b2f738b 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -1351,22 +1351,23 @@ async def test_run_funnel_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_funnel_report - ] = mock_object + ] = mock_rpc request = {} await client.run_funnel_report(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_funnel_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1594,8 +1595,9 @@ def test_create_audience_list_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_audience_list(request) @@ -1651,26 +1653,28 @@ async def test_create_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.create_audience_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2049,22 +2053,23 @@ async def test_query_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.query_audience_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2443,22 +2448,23 @@ async def test_sheet_export_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.sheet_export_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.sheet_export_audience_list(request) # Establish that the underlying gRPC stub method was called. 
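The reworded comment in the operation-method tests above now spells out that `wrapper_fn` builds a cached `client._transport.operations_client` on the first RPC call and that subsequent calls reuse it. A small, purely illustrative sketch of that build-once-then-reuse behaviour; `FakeTransport` and `FakeOperationsClient` are made-up names, not the generated classes:

```python
class FakeOperationsClient:
    """Stand-in for the operations client built by wrapper_fn."""


class FakeTransport:
    def __init__(self):
        self._operations_client = None
        self.build_count = 0  # how many times the cached wrapper was built

    @property
    def operations_client(self):
        # Built lazily on first access, then reused on every later access.
        if self._operations_client is None:
            self.build_count += 1
            self._operations_client = FakeOperationsClient()
        return self._operations_client


transport = FakeTransport()
first = transport.operations_client   # built on the first "rpc call"
second = transport.operations_client  # cached instance is reused
assert first is second and transport.build_count == 1
```

This is also why the tests reset `wrapper_fn` after the first call and then assert `wrapper_fn.call_count == 0`: a second RPC must not rebuild the wrapper.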
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sheet_export_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2856,22 +2862,23 @@ async def test_get_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3261,22 +3268,23 @@ async def test_list_audience_lists_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_audience_lists - ] = mock_object + ] = mock_rpc request = {} await client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3863,22 +3871,23 @@ async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_recurring_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.create_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4289,22 +4298,23 @@ async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recurring_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4687,22 +4697,23 @@ async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recurring_audience_lists - ] = mock_object + ] = mock_rpc request = {} await client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5224,8 +5235,9 @@ def test_create_report_task_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_report_task(request) @@ -5281,26 +5293,28 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_report_task - ] = mock_object + ] = mock_rpc request = {} await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5677,22 +5691,23 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_report_task - ] = mock_object + ] = mock_rpc request = {} await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6052,22 +6067,23 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_report_task - ] = mock_object + ] = mock_rpc request = {} await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6429,22 +6445,23 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_report_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_report_tasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_report_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py index 19258ca590fa..c160996cd5ab 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -1336,22 +1336,23 @@ async def test_run_report_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_report - ] = mock_object + ] = mock_rpc request = {} await client.run_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1624,22 +1625,23 @@ async def test_run_pivot_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_pivot_report - ] = mock_object + ] = mock_rpc request = {} await client.run_pivot_report(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_pivot_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1917,22 +1919,23 @@ async def test_batch_run_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_run_reports - ] = mock_object + ] = mock_rpc request = {} await client.batch_run_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_run_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2219,22 +2222,23 @@ async def test_batch_run_pivot_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_run_pivot_reports - ] = mock_object + ] = mock_rpc request = {} await client.batch_run_pivot_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_run_pivot_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2508,22 +2512,23 @@ async def test_get_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2887,22 +2892,23 @@ async def test_run_realtime_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_realtime_report - ] = mock_object + ] = mock_rpc request = {} await client.run_realtime_report(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_realtime_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3185,22 +3191,23 @@ async def test_check_compatibility_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_compatibility - ] = mock_object + ] = mock_rpc request = {} await client.check_compatibility(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_compatibility(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3426,8 +3433,9 @@ def test_create_audience_export_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_audience_export(request) @@ -3483,26 +3491,28 @@ async def test_create_audience_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_audience_export - ] = mock_object + ] = mock_rpc request = {} await client.create_audience_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_audience_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3882,22 +3892,23 @@ async def test_query_audience_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_audience_export - ] = mock_object + ] = mock_rpc request = {} await client.query_audience_export(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_audience_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4290,22 +4301,23 @@ async def test_get_audience_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_audience_export - ] = mock_object + ] = mock_rpc request = {} await client.get_audience_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_audience_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4694,22 +4706,23 @@ async def test_list_audience_exports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_audience_exports - ] = mock_object + ] = mock_rpc request = {} await client.list_audience_exports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_audience_exports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-card/google/apps/card/gapic_version.py b/packages/google-apps-card/google/apps/card/gapic_version.py index 937ede8823ef..558c8aab67c5 100644 --- a/packages/google-apps-card/google/apps/card/gapic_version.py +++ b/packages/google-apps-card/google/apps/card/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-card/google/apps/card_v1/gapic_version.py b/packages/google-apps-card/google/apps/card_v1/gapic_version.py index 937ede8823ef..558c8aab67c5 100644 --- a/packages/google-apps-card/google/apps/card_v1/gapic_version.py +++ b/packages/google-apps-card/google/apps/card_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index e8130b807fc3..945434859bde 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -236,9 +235,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ChatServiceClient).get_transport_class, type(ChatServiceClient) - ) + get_transport_class = ChatServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index fa5e085eb32e..268ff696d3a6 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -850,7 +850,7 @@ def __init__( transport_init: Union[ Type[ChatServiceTransport], Callable[..., ChatServiceTransport] ] = ( - type(self).get_transport_class(transport) + ChatServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ChatServiceTransport], transport) ) diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 189e4bbc7470..6e637d46d014 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 28054fb2de8b..27ad0d358b7d 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -1320,22 +1320,23 @@ async def test_create_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_message - ] = mock_object + ] = mock_rpc request = {} await client.create_message(request) # Establish that the underlying gRPC 
stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1718,22 +1719,23 @@ async def test_list_messages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_messages - ] = mock_object + ] = mock_rpc request = {} await client.list_messages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2282,22 +2284,23 @@ async def test_list_memberships_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_memberships - ] = mock_object + ] = mock_rpc request = {} await client.list_memberships(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_memberships(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2846,22 +2849,23 @@ async def test_get_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_membership - ] = mock_object + ] = mock_rpc request = {} await client.get_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3232,22 +3236,23 @@ async def test_get_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_message - ] = mock_object + ] = mock_rpc request = {} await client.get_message(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3618,22 +3623,23 @@ async def test_update_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_message - ] = mock_object + ] = mock_rpc request = {} await client.update_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3993,22 +3999,23 @@ async def test_delete_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_message - ] = mock_object + ] = mock_rpc request = {} await client.delete_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4363,22 +4370,23 @@ async def test_get_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attachment - ] = mock_object + ] = mock_rpc request = {} await client.get_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4744,22 +4752,23 @@ async def test_upload_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upload_attachment - ] = mock_object + ] = mock_rpc request = {} await client.upload_attachment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.upload_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5031,22 +5040,23 @@ async def test_list_spaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_spaces - ] = mock_object + ] = mock_rpc request = {} await client.list_spaces(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_spaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5477,22 +5487,23 @@ async def test_get_space_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space - ] = mock_object + ] = mock_rpc request = {} await client.get_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5898,22 +5909,23 @@ async def test_create_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_space - ] = mock_object + ] = mock_rpc request = {} await client.create_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6260,22 +6272,23 @@ async def test_set_up_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_up_space - ] = mock_object + ] = mock_rpc request = {} await client.set_up_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_up_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6538,22 +6551,23 @@ async def test_update_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space - ] = mock_object + ] = mock_rpc request = {} await client.update_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6926,22 +6940,23 @@ async def test_delete_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_space - ] = mock_object + ] = mock_rpc request = {} await client.delete_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7289,22 +7304,23 @@ async def test_complete_import_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_import_space - ] = mock_object + ] = mock_rpc request = {} await client.complete_import_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_import_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7622,22 +7638,23 @@ async def test_find_direct_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.find_direct_message - ] = mock_object + ] = mock_rpc request = {} await client.find_direct_message(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.find_direct_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7886,22 +7903,23 @@ async def test_create_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_membership - ] = mock_object + ] = mock_rpc request = {} await client.create_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8286,22 +8304,23 @@ async def test_update_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_membership - ] = mock_object + ] = mock_rpc request = {} await client.update_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8690,22 +8709,23 @@ async def test_delete_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_membership - ] = mock_object + ] = mock_rpc request = {} await client.delete_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9068,22 +9088,23 @@ async def test_create_reaction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_reaction - ] = mock_object + ] = mock_rpc request = {} await client.create_reaction(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_reaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9446,22 +9467,23 @@ async def test_list_reactions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reactions - ] = mock_object + ] = mock_rpc request = {} await client.list_reactions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reactions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9997,22 +10019,23 @@ async def test_delete_reaction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_reaction - ] = mock_object + ] = mock_rpc request = {} await client.delete_reaction(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_reaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10364,22 +10387,23 @@ async def test_get_space_read_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space_read_state - ] = mock_object + ] = mock_rpc request = {} await client.get_space_read_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space_read_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10748,22 +10772,23 @@ async def test_update_space_read_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space_read_state - ] = mock_object + ] = mock_rpc request = {} await client.update_space_read_state(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space_read_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11146,22 +11171,23 @@ async def test_get_thread_read_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_thread_read_state - ] = mock_object + ] = mock_rpc request = {} await client.get_thread_read_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_thread_read_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11524,22 +11550,23 @@ async def test_get_space_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space_event - ] = mock_object + ] = mock_rpc request = {} await client.get_space_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11904,22 +11931,23 @@ async def test_list_space_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_space_events - ] = mock_object + ] = mock_rpc request = {} await client.list_space_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_space_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py index 78d8fb9cc72b..819b85fa0aaf 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,10 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SubscriptionsServiceClient).get_transport_class, - type(SubscriptionsServiceClient), - ) + get_transport_class = SubscriptionsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py index 986f228ec4bf..738eea50f5e0 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[SubscriptionsServiceTransport], Callable[..., SubscriptionsServiceTransport], ] = ( - type(self).get_transport_class(transport) + SubscriptionsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SubscriptionsServiceTransport], transport) ) diff --git a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json index f45e3c572dbc..367a176961c1 100644 --- a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json +++ b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-events-subscriptions", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py 
b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py index 25cce96bedd4..b142e2d9e8f3 100644 --- a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py +++ b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py @@ -1328,8 +1328,9 @@ def test_create_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_subscription(request) @@ -1385,26 +1386,28 @@ async def test_create_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_subscription - ] = mock_object + ] = mock_rpc request = {} await client.create_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1664,8 +1667,9 @@ def test_delete_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_subscription(request) @@ -1721,26 +1725,28 @@ async def test_delete_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_subscription - ] = mock_object + ] = mock_rpc request = {} await client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2126,22 +2132,23 @@ async def test_get_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subscription - ] = mock_object + ] = mock_rpc request = {} await client.get_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2524,22 +2531,23 @@ async def test_list_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,8 +2991,9 @@ def test_update_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_subscription(request) @@ -3040,26 +3049,28 @@ async def test_update_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_subscription - ] = mock_object + ] = mock_rpc request = {} await client.update_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3393,8 +3404,9 @@ def test_reactivate_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reactivate_subscription(request) @@ -3450,26 +3462,28 @@ async def test_reactivate_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reactivate_subscription - ] = mock_object + ] = mock_rpc request = {} await client.reactivate_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reactivate_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/google/apps/meet/gapic_version.py b/packages/google-apps-meet/google/apps/meet/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-apps-meet/google/apps/meet/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py index 3e05db20e0fd..d1eeabee1a10 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
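For the long-running-operation methods in the subscriptions tests above (create, delete, update, reactivate), the reworded comments describe one extra layer of caching: the first call also runs wrapper_fn to build the transport's operations_client, and later calls reuse that instance, which is why wrapper_fn.call_count stays at zero after the reset. A rough sketch of that lazy-caching idea, using an invented LazyTransport rather than the real transport classes:

# Rough sketch of lazily building and caching an operations client on first
# use (invented LazyTransport, not one of the generated transports).
from unittest import mock


class LazyTransport:
    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        # Built once on the first long-running-operation call, then cached.
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(side_effect=object)
transport = LazyTransport(wrapper_fn)

first = transport.operations_client   # builds and caches the client
second = transport.operations_client  # reuses the cached instance

assert first is second
assert wrapper_fn.call_count == 1  # no new wrapper built on the second call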
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,10 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConferenceRecordsServiceClient).get_transport_class, - type(ConferenceRecordsServiceClient), - ) + get_transport_class = ConferenceRecordsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py index b99ecbce32bc..4001fe724b57 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py @@ -785,7 +785,7 @@ def __init__( Type[ConferenceRecordsServiceTransport], Callable[..., ConferenceRecordsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConferenceRecordsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConferenceRecordsServiceTransport], transport) ) diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py index 5791d5429b37..8ffd92ec7dd0 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpacesServiceClient).get_transport_class, type(SpacesServiceClient) - ) + get_transport_class = SpacesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py index 585fb27f62a5..7a6bed8a44d2 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py @@ -667,7 +667,7 @@ def __init__( transport_init: Union[ Type[SpacesServiceTransport], Callable[..., SpacesServiceTransport] ] = ( - type(self).get_transport_class(transport) + SpacesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpacesServiceTransport], transport) ) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py index ac6349067970..a44ca33c3796 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,10 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConferenceRecordsServiceClient).get_transport_class, - type(ConferenceRecordsServiceClient), - ) + get_transport_class = ConferenceRecordsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py index 81a836304d99..4764e9d20ae9 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py @@ -785,7 +785,7 @@ def __init__( Type[ConferenceRecordsServiceTransport], Callable[..., ConferenceRecordsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConferenceRecordsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConferenceRecordsServiceTransport], transport) ) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py index f66d5f40977d..3e1421737e28 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
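The async client changes above also drop the functools.partial indirection around get_transport_class in favor of referencing the attribute on the sync client directly. The generated clients define get_transport_class on a metaclass, so both spellings end up performing the same transport-registry lookup; the stand-in DemoClient below (not a generated class) illustrates the equivalence.

# Why functools.partial(type(Client).get_transport_class, type(Client)) can be
# replaced by Client.get_transport_class: the method lives on a metaclass, so
# both spellings resolve the same registry. Stand-in names, not GAPIC classes.
import functools


class DemoClientMeta(type):
    _transports = {"grpc": "GrpcTransport", "rest": "RestTransport"}

    def get_transport_class(cls, label=None):
        return cls._transports[label or "grpc"]


class DemoClient(metaclass=DemoClientMeta):
    pass


# Old spelling: fetch the function from the metaclass and pre-bind it.
old_style = functools.partial(type(DemoClient).get_transport_class, type(DemoClient))

# New spelling: attribute lookup on the class already yields a bound method.
new_style = DemoClient.get_transport_class

assert old_style("rest") == new_style("rest") == "RestTransport"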
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpacesServiceClient).get_transport_class, type(SpacesServiceClient) - ) + get_transport_class = SpacesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py index 8af3974b5ca2..a8c4f208485c 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py @@ -667,7 +667,7 @@ def __init__( transport_init: Union[ Type[SpacesServiceTransport], Callable[..., SpacesServiceTransport] ] = ( - type(self).get_transport_class(transport) + SpacesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpacesServiceTransport], transport) ) diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json index 4071af248422..1c795de189b2 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-meet", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json index c956685a5da0..bbee1583d40d 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-meet", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py index 4d7cf4e2f4ce..e141ae8bb275 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py @@ -1379,22 +1379,23 @@ async def test_get_conference_record_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conference_record - ] = mock_object + ] = mock_rpc request = {} await client.get_conference_record(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conference_record(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1770,22 +1771,23 @@ async def test_list_conference_records_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conference_records - ] = mock_object + ] = mock_rpc request = {} await client.list_conference_records(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conference_records(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2192,22 +2194,23 @@ async def test_get_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant - ] = mock_object + ] = mock_rpc request = {} await client.get_participant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2573,22 +2576,23 @@ async def test_list_participants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participants - ] = mock_object + ] = mock_rpc request = {} await client.list_participants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3164,22 +3168,23 @@ async def test_get_participant_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant_session - ] = mock_object + ] = mock_rpc request = {} await client.get_participant_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3555,22 +3560,23 @@ async def test_list_participant_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participant_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_participant_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participant_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4136,22 +4142,23 @@ async def test_get_recording_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recording - ] = mock_object + ] = mock_rpc request = {} await client.get_recording(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recording(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4500,22 +4507,23 @@ async def test_list_recordings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recordings - ] = mock_object + ] = mock_rpc request = {} await client.list_recordings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recordings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5061,22 +5069,23 @@ async def test_get_transcript_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5427,22 +5436,23 @@ async def test_list_transcripts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcripts - ] = mock_object + ] = mock_rpc request = {} await client.list_transcripts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcripts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6006,22 +6016,23 @@ async def test_get_transcript_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6401,22 +6412,23 @@ async def test_list_transcript_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcript_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_transcript_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcript_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py index bba44c0fa971..f9af1772962d 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py @@ -1282,22 +1282,23 @@ async def test_create_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_space - ] = mock_object + ] = mock_rpc request = {} await client.create_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1591,22 +1592,23 @@ async def test_get_space_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space - ] = mock_object + ] = mock_rpc request = {} await client.get_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1957,22 +1959,23 @@ async def test_update_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space - ] = mock_object + ] = mock_rpc request = {} await client.update_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2337,22 +2340,23 @@ async def test_end_active_conference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.end_active_conference - ] = mock_object + ] = mock_rpc request = {} await client.end_active_conference(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.end_active_conference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py index 3b6c1a68092e..37a89bcdfaba 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py @@ -1376,22 +1376,23 @@ async def test_get_conference_record_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conference_record - ] = mock_object + ] = mock_rpc request = {} await client.get_conference_record(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conference_record(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1767,22 +1768,23 @@ async def test_list_conference_records_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conference_records - ] = mock_object + ] = mock_rpc request = {} await client.list_conference_records(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conference_records(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2189,22 +2191,23 @@ async def test_get_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant - ] = mock_object + ] = mock_rpc request = {} await client.get_participant(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2570,22 +2573,23 @@ async def test_list_participants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participants - ] = mock_object + ] = mock_rpc request = {} await client.list_participants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3161,22 +3165,23 @@ async def test_get_participant_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant_session - ] = mock_object + ] = mock_rpc request = {} await client.get_participant_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,22 +3557,23 @@ async def test_list_participant_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participant_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_participant_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participant_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4133,22 +4139,23 @@ async def test_get_recording_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recording - ] = mock_object + ] = mock_rpc request = {} await client.get_recording(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recording(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4497,22 +4504,23 @@ async def test_list_recordings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recordings - ] = mock_object + ] = mock_rpc request = {} await client.list_recordings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recordings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5058,22 +5066,23 @@ async def test_get_transcript_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5424,22 +5433,23 @@ async def test_list_transcripts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcripts - ] = mock_object + ] = mock_rpc request = {} await client.list_transcripts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcripts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6003,22 +6013,23 @@ async def test_get_transcript_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6398,22 +6409,23 @@ async def test_list_transcript_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcript_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_transcript_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcript_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py index baaa4aa3e894..e3c1edc6f594 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py @@ -1279,22 +1279,23 @@ async def test_create_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_space - ] = mock_object + ] = mock_rpc request = {} await client.create_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1588,22 +1589,23 @@ async def test_get_space_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space - ] = mock_object + ] = mock_rpc request = {} await client.get_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1954,22 +1956,23 @@ async def test_update_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space - ] = mock_object + ] = mock_rpc request = {} await client.update_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2334,22 +2337,23 @@ async def test_end_active_conference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.end_active_conference - ] = mock_object + ] = mock_rpc request = {} await client.end_active_conference(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.end_active_conference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables/gapic_version.py b/packages/google-area120-tables/google/area120/tables/gapic_version.py index 11e34cec2824..558c8aab67c5 100644 --- a/packages/google-area120-tables/google/area120/tables/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py index 11e34cec2824..558c8aab67c5 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py index ee5242225d3a..5a27331668c0 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TablesServiceClient).get_transport_class, type(TablesServiceClient) - ) + get_transport_class = TablesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py index d87ea30e4e99..871e6857384e 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py @@ -699,7 +699,7 @@ def __init__( transport_init: Union[ Type[TablesServiceTransport], Callable[..., TablesServiceTransport] ] = ( - type(self).get_transport_class(transport) + TablesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TablesServiceTransport], transport) ) diff --git a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json index 16f4052fc1ba..fdfb568d52d6 100644 --- a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json +++ b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-area120-tables", - "version": "0.11.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py index d46a74ee41c8..580666937e7b 100644 --- a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py +++ b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py @@ -1287,22 +1287,23 @@ async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table - ] = mock_object + ] = mock_rpc request = {} await client.get_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1649,22 +1650,23 @@ async def test_list_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tables - ] = mock_object + ] = mock_rpc request = {} await client.list_tables(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tables(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2064,22 +2066,23 @@ async def test_get_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workspace - ] = mock_object + ] = mock_rpc request = {} await client.get_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2426,22 +2429,23 @@ async def test_list_workspaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workspaces - ] = mock_object + ] = mock_rpc request = {} await client.list_workspaces(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workspaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2836,22 +2840,23 @@ async def test_get_row_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_row - ] = mock_object + ] = mock_rpc request = {} await client.get_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3198,22 +3203,23 @@ async def test_list_rows_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_rows - ] = mock_object + ] = mock_rpc request = {} await client.list_rows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3754,22 +3760,23 @@ async def test_create_row_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_row - ] = mock_object + ] = mock_rpc request = {} await client.create_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4129,22 +4136,23 @@ async def test_batch_create_rows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_rows - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_rows(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4408,22 +4416,23 @@ async def test_update_row_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_row - ] = mock_object + ] = mock_rpc request = {} await client.update_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4783,22 +4792,23 @@ async def test_batch_update_rows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_rows - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_rows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5059,22 +5069,23 @@ async def test_delete_row_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_row - ] = mock_object + ] = mock_rpc request = {} await client.delete_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5417,22 +5428,23 @@ async def test_batch_delete_rows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_rows - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_rows(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py index 5ce9fd99a7cf..2adcbbec15b4 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -239,9 +238,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AccessApprovalClient).get_transport_class, type(AccessApprovalClient) - ) + get_transport_class = AccessApprovalClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py index 867e35bd52d9..3a134b51689f 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py @@ -726,7 +726,7 @@ def __init__( transport_init: Union[ Type[AccessApprovalTransport], Callable[..., AccessApprovalTransport] ] = ( - type(self).get_transport_class(transport) + AccessApprovalClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AccessApprovalTransport], transport) ) diff --git a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json index 794dd0aecd46..b1f693e8b12f 100644 --- a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json +++ b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-access-approval", - "version": "1.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py index 4afcbb8ae2b0..05777554f668 100644 --- a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py +++ b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py @@ -1300,22 +1300,23 @@ async def test_list_approval_requests_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_approval_requests - ] = mock_object + ] = mock_rpc request = {} await client.list_approval_requests(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_approval_requests(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1892,22 +1893,23 @@ async def test_get_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.get_approval_request(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2285,22 +2287,23 @@ async def test_approve_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.approve_approval_request(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.approve_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2592,22 +2595,23 @@ async def test_dismiss_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.dismiss_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.dismiss_approval_request(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.dismiss_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2899,22 +2903,23 @@ async def test_invalidate_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.invalidate_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.invalidate_approval_request(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.invalidate_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3218,22 +3223,23 @@ async def test_get_access_approval_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_access_approval_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_access_approval_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_access_approval_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3627,22 +3633,23 @@ async def test_update_access_approval_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_access_approval_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_access_approval_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_access_approval_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4028,22 +4035,23 @@ async def test_delete_access_approval_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_access_approval_settings - ] = mock_object + ] = mock_rpc request = {} await client.delete_access_approval_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_access_approval_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4410,22 +4418,23 @@ async def test_get_access_approval_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_access_approval_service_account - ] = mock_object + ] = mock_rpc request = {} await client.get_access_approval_service_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_access_approval_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py index ff477afcbd78..0de083efd228 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,10 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AdvisoryNotificationsServiceClient).get_transport_class, - type(AdvisoryNotificationsServiceClient), - ) + get_transport_class = AdvisoryNotificationsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py index 8b819b14687a..1596808d73f2 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py @@ -699,7 +699,7 @@ def __init__( Type[AdvisoryNotificationsServiceTransport], Callable[..., AdvisoryNotificationsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AdvisoryNotificationsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., AdvisoryNotificationsServiceTransport], transport diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json index 32a422671417..d786a8bfb8cb 100644 --- a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-advisorynotifications", - "version": "0.3.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py index 5369d76a3f66..d0845265e3ea 100644 --- a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py @@ -1393,22 +1393,23 @@ async def test_list_notifications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notifications - ] = mock_object + ] = mock_rpc request = {} await client.list_notifications(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notifications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1981,22 +1982,23 @@ async def test_get_notification_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification - ] = mock_object + ] = mock_rpc request = {} await client.get_notification(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2353,22 +2355,23 @@ async def test_get_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2714,22 +2717,23 @@ async def test_update_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py index 51d2795b9d6b..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py +++ b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py index 51d2795b9d6b..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py +++ b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index ab68833be4be..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index ab68833be4be..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py index 0b4f876d17f6..ad4f6077b5cd 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) - ) + get_transport_class = AlloyDBAdminClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py index c6ebe8516c05..868bc60993fa 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py @@ -826,7 +826,7 @@ def __init__( transport_init: Union[ Type[AlloyDBAdminTransport], Callable[..., AlloyDBAdminTransport] ] = ( - type(self).get_transport_class(transport) + AlloyDBAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlloyDBAdminTransport], transport) ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py index eab98a4fe3ff..235d25432420 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py @@ -1283,7 +1283,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index ab68833be4be..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py index 5497fbab5978..7166d66ef0e7 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) - ) + get_transport_class = AlloyDBAdminClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py index dadcbdee6bb5..d843322e819e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -850,7 +850,7 @@ def __init__( transport_init: Union[ Type[AlloyDBAdminTransport], Callable[..., AlloyDBAdminTransport] ] = ( - type(self).get_transport_class(transport) + AlloyDBAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlloyDBAdminTransport], transport) ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py index 22d401ff11cc..e4af7a8c95de 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -1344,7 +1344,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index ab68833be4be..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py index dc0acb2eb965..9e4dbdf4845b 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) - ) + get_transport_class = AlloyDBAdminClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py index 7c25c5f52022..4b4aa9d9597b 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py @@ -850,7 +850,7 @@ def __init__( transport_init: Union[ Type[AlloyDBAdminTransport], Callable[..., AlloyDBAdminTransport] ] = ( - type(self).get_transport_class(transport) + AlloyDBAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlloyDBAdminTransport], transport) ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index ca93ce160aae..fd99ddd2630f 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -1312,7 +1312,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. 
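Two mechanical changes recur throughout the hunks in this patch. First, the async clients drop functools.partial and alias the classmethod directly (get_transport_class = AlloyDBAdminClient.get_transport_class). Below is a minimal sketch of why the two spellings are interchangeable; FakeClientMeta, FakeClient, and _FakeGrpcTransport are hypothetical stand-ins, not the generated AlloyDB types, and the sketch assumes (as in the generated client.py) that get_transport_class is defined on the client's metaclass.

    # Minimal sketch, not the generated client: get_transport_class lives on the
    # metaclass, so accessing it through the class already yields a bound method.
    from collections import OrderedDict
    import functools


    class _FakeGrpcTransport:
        """Stand-in for a transport class."""


    class FakeClientMeta(type):
        _transport_registry = OrderedDict([("grpc", _FakeGrpcTransport)])

        def get_transport_class(cls, label=None):
            # Return the registered transport for the label, or the first one.
            if label:
                return cls._transport_registry[label]
            return next(iter(cls._transport_registry.values()))


    class FakeClient(metaclass=FakeClientMeta):
        pass


    # Old generated spelling: partially apply the metaclass method to the metaclass.
    old_style = functools.partial(
        type(FakeClient).get_transport_class, type(FakeClient)
    )

    # New generated spelling: attribute access on the class binds the method directly.
    new_style = FakeClient.get_transport_class

    assert old_style("grpc") is new_style("grpc") is _FakeGrpcTransport

Second, the regenerated async tests rename mock_object to mock_rpc and set mock_rpc.return_value to a plain Mock, so awaiting the stub yields a synchronous object rather than the AsyncMock child that unittest.mock creates by default, presumably to keep handling of the fake response synchronous. A minimal sketch of the cached-wrapped-RPC assertion those tests make follows; FakeTransport and FakeAsyncClient are assumed stand-ins, not the real GAPIC classes.

    # Minimal sketch of the "use cached wrapped rpc" pattern: the transport caches
    # one wrapped callable per RPC, and repeated client calls reuse it.
    import asyncio
    from unittest import mock


    class FakeTransport:
        def __init__(self):
            # One wrapped callable per RPC name, built once and then reused.
            self._wrapped_methods = {"get_cluster": mock.AsyncMock()}


    class FakeAsyncClient:
        def __init__(self):
            self._transport = FakeTransport()

        async def get_cluster(self, request):
            # Always dispatch through the cached wrapper; never rebuild it.
            return await self._transport._wrapped_methods["get_cluster"](request)


    async def main():
        client = FakeAsyncClient()

        # Mirror the test: replace the cached wrapper with an AsyncMock whose
        # awaited result is a plain Mock rather than another AsyncMock.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._transport._wrapped_methods["get_cluster"] = mock_rpc

        await client.get_cluster({})
        assert mock_rpc.call_count == 1

        await client.get_cluster({})
        # Same cached wrapper both times, so the call count keeps growing and
        # no new wrapper is built for the second call.
        assert mock_rpc.call_count == 2


    asyncio.run(main())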
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index d3ea7218d351..1cb197a5b3b4 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index 56f45a7164c9..d70e655d280d 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index f2e7b2fbe03f..dc19bd4f2a54 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py index f937e65bcc12..3714063ac2bb 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -1287,22 +1287,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1871,22 +1872,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2195,8 +2197,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2250,26 +2253,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,8 +2596,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2646,26 +2652,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2981,8 +2989,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3036,26 +3045,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3349,8 +3360,9 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.promote_cluster(request) @@ -3404,26 +3416,28 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.promote_cluster - ] = mock_object + ] = mock_rpc request = {} await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3717,8 +3731,9 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_cluster(request) @@ -3772,26 +3787,28 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_cluster - ] = mock_object + ] = mock_rpc request = {} await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4014,8 +4031,9 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_cluster(request) @@ -4071,26 +4089,28 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4487,22 +4507,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5074,22 +5095,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5400,8 +5422,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -5455,26 +5478,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5799,8 +5824,9 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_instance(request) @@ -5856,26 +5882,28 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6208,8 +6236,9 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_instances(request) @@ -6265,26 +6294,28 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_instances - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6498,8 +6529,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -6553,26 +6585,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6876,8 +6910,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -6931,26 +6966,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7250,8 +7287,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -7307,26 +7345,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7628,8 +7668,9 @@ def test_inject_fault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.inject_fault(request) @@ -7683,26 +7724,28 @@ async def test_inject_fault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.inject_fault - ] = mock_object + ] = mock_rpc request = {} await client.inject_fault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.inject_fault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8006,8 +8049,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -8061,26 +8105,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8435,22 +8481,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9026,22 +9073,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9356,8 +9404,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -9411,26 +9460,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9740,8 +9791,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -9795,26 +9847,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10118,8 +10172,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -10173,26 +10228,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10553,22 +10610,23 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_supported_database_flags - ] = mock_object + ] = mock_rpc request = {} await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11152,22 +11210,23 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_client_certificate - ] = mock_object + ] = mock_rpc request = {} await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11549,22 +11608,23 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_info - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11934,22 +11994,23 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_users - ] = mock_object + ] = mock_rpc request = {} await client.list_users(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12501,22 +12562,23 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user - ] = mock_object + ] = mock_rpc request = {} await client.get_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12880,22 +12942,23 @@ async def test_create_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user - ] = mock_object + ] = mock_rpc request = {} await client.create_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13275,22 +13338,23 @@ async def test_update_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user - ] = mock_object + ] = mock_rpc request = {} await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13646,22 +13710,23 @@ async def test_delete_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user - ] = mock_object + ] = mock_rpc request = {} await client.delete_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index 79cd2240746d..631b1b909ac2 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -1287,22 +1287,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1877,22 +1878,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2205,8 +2207,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2260,26 +2263,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2601,8 +2606,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2656,26 +2662,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2991,8 +2999,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3046,26 +3055,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3359,8 +3370,9 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.promote_cluster(request) @@ -3414,26 +3426,28 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.promote_cluster - ] = mock_object + ] = mock_rpc request = {} await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3727,8 +3741,9 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_cluster(request) @@ -3782,26 +3797,28 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_cluster - ] = mock_object + ] = mock_rpc request = {} await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4024,8 +4041,9 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_cluster(request) @@ -4081,26 +4099,28 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4497,22 +4517,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5093,22 +5114,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5425,8 +5447,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -5480,26 +5503,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5824,8 +5849,9 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_instance(request) @@ -5881,26 +5907,28 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6233,8 +6261,9 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_instances(request) @@ -6290,26 +6319,28 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_instances - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6523,8 +6554,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -6578,26 +6610,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6901,8 +6935,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -6956,26 +6991,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7275,8 +7312,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -7332,26 +7370,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7653,8 +7693,9 @@ def test_inject_fault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.inject_fault(request) @@ -7708,26 +7749,28 @@ async def test_inject_fault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.inject_fault - ] = mock_object + ] = mock_rpc request = {} await client.inject_fault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.inject_fault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8031,8 +8074,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -8086,26 +8130,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8460,22 +8506,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9057,22 +9104,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9391,8 +9439,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -9446,26 +9495,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9775,8 +9826,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -9830,26 +9882,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10153,8 +10207,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -10208,26 +10263,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10588,22 +10645,23 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_supported_database_flags - ] = mock_object + ] = mock_rpc request = {} await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11192,22 +11250,23 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_client_certificate - ] = mock_object + ] = mock_rpc request = {} await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11600,22 +11659,23 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_info - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11991,22 +12051,23 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_users - ] = mock_object + ] = mock_rpc request = {} await client.list_users(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12558,22 +12619,23 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user - ] = mock_object + ] = mock_rpc request = {} await client.get_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12937,22 +12999,23 @@ async def test_create_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user - ] = mock_object + ] = mock_rpc request = {} await client.create_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13332,22 +13395,23 @@ async def test_update_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user - ] = mock_object + ] = mock_rpc request = {} await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13703,22 +13767,23 @@ async def test_delete_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user - ] = mock_object + ] = mock_rpc request = {} await client.delete_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14062,22 +14127,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index b5b6c9bdcb8f..510dd441d48d 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -1287,22 +1287,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1874,22 +1875,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2200,8 +2202,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2255,26 +2258,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2596,8 +2601,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2651,26 +2657,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2986,8 +2994,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3041,26 +3050,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3354,8 +3365,9 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.promote_cluster(request) @@ -3409,26 +3421,28 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.promote_cluster - ] = mock_object + ] = mock_rpc request = {} await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3722,8 +3736,9 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_cluster(request) @@ -3777,26 +3792,28 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_cluster - ] = mock_object + ] = mock_rpc request = {} await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4019,8 +4036,9 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_cluster(request) @@ -4076,26 +4094,28 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4492,22 +4512,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5085,22 +5106,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5415,8 +5437,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -5470,26 +5493,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5814,8 +5839,9 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_instance(request) @@ -5871,26 +5897,28 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6223,8 +6251,9 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_instances(request) @@ -6280,26 +6309,28 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_instances - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6513,8 +6544,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -6568,26 +6600,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6925,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -6946,26 +6981,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7265,8 +7302,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -7322,26 +7360,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7643,8 +7683,9 @@ def test_inject_fault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.inject_fault(request) @@ -7698,26 +7739,28 @@ async def test_inject_fault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.inject_fault - ] = mock_object + ] = mock_rpc request = {} await client.inject_fault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.inject_fault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8021,8 +8064,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -8076,26 +8120,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8450,22 +8496,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9044,22 +9091,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9376,8 +9424,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -9431,26 +9480,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9760,8 +9811,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -9815,26 +9867,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10138,8 +10192,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -10193,26 +10248,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10573,22 +10630,23 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_supported_database_flags - ] = mock_object + ] = mock_rpc request = {} await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11177,22 +11235,23 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_client_certificate - ] = mock_object + ] = mock_rpc request = {} await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11585,22 +11644,23 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_info - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11976,22 +12036,23 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_users - ] = mock_object + ] = mock_rpc request = {} await client.list_users(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12543,22 +12604,23 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user - ] = mock_object + ] = mock_rpc request = {} await client.get_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12922,22 +12984,23 @@ async def test_create_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user - ] = mock_object + ] = mock_rpc request = {} await client.create_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13317,22 +13380,23 @@ async def test_update_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user - ] = mock_object + ] = mock_rpc request = {} await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13688,22 +13752,23 @@ async def test_delete_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user - ] = mock_object + ] = mock_rpc request = {} await client.delete_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14047,22 +14112,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py index a2335db549af..1a2af4b47dea 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApiGatewayServiceClient).get_transport_class, type(ApiGatewayServiceClient) - ) + get_transport_class = ApiGatewayServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py index 037b0a4b9d8a..d3ad4c8f4101 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py @@ -766,7 +766,7 @@ def __init__( Type[ApiGatewayServiceTransport], Callable[..., ApiGatewayServiceTransport], ] = ( - type(self).get_transport_class(transport) + ApiGatewayServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApiGatewayServiceTransport], transport) ) diff --git a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json index 1496842e4c28..fdf981856ea3 100644 --- a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json +++ b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-gateway", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py index c46ffa458645..dbaa24996aed 100644 --- a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py +++ b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py @@ -1341,22 +1341,23 @@ async def test_list_gateways_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_gateways - ] = mock_object + ] = mock_rpc request = {} await client.list_gateways(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_gateways(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1913,22 +1914,23 @@ async def test_get_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_gateway - ] = mock_object + ] = mock_rpc request = {} await client.get_gateway(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2227,8 +2229,9 @@ def test_create_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_gateway(request) @@ -2282,26 +2285,28 @@ async def test_create_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_gateway - ] = mock_object + ] = mock_rpc request = {} await client.create_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2607,8 +2612,9 @@ def test_update_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_gateway(request) @@ -2662,26 +2668,28 @@ async def test_update_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_gateway - ] = mock_object + ] = mock_rpc request = {} await client.update_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2981,8 +2989,9 @@ def test_delete_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_gateway(request) @@ -3036,26 +3045,28 @@ async def test_delete_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_gateway - ] = mock_object + ] = mock_rpc request = {} await client.delete_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3408,22 +3419,23 @@ async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_apis - ] = mock_object + ] = mock_rpc request = {} await client.list_apis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_apis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3975,22 +3987,23 @@ async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api - ] = mock_object + ] = mock_rpc request = {} await client.get_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4287,8 +4300,9 @@ def test_create_api_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api(request) @@ -4340,26 +4354,28 @@ async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api - ] = mock_object + ] = mock_rpc request = {} await client.create_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4665,8 +4681,9 @@ def test_update_api_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_api(request) @@ -4718,26 +4735,28 @@ async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api - ] = mock_object + ] = mock_rpc request = {} await client.update_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5037,8 +5056,9 @@ def test_delete_api_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_api(request) @@ -5090,26 +5110,28 @@ async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api - ] = mock_object + ] = mock_rpc request = {} await client.delete_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5466,22 +5488,23 @@ async def test_list_api_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_api_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6038,22 +6061,23 @@ async def test_get_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_config - ] = mock_object + ] = mock_rpc request = {} await client.get_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6364,8 +6388,9 @@ def test_create_api_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api_config(request) @@ -6421,26 +6446,28 @@ async def test_create_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_config - ] = mock_object + ] = mock_rpc request = {} await client.create_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6764,8 +6791,9 @@ def test_update_api_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_api_config(request) @@ -6821,26 +6849,28 @@ async def test_update_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_config - ] = mock_object + ] = mock_rpc request = {} await client.update_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7158,8 +7188,9 @@ def test_delete_api_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_api_config(request) @@ -7215,26 +7246,28 @@ async def test_delete_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py index dee1a213b742..4195ea44ee16 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApiKeysClient).get_transport_class, type(ApiKeysClient) - ) + get_transport_class = ApiKeysClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py index a500d1d53251..747e9bb8dde2 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py @@ -662,7 +662,7 @@ def __init__( transport_init: Union[ Type[ApiKeysTransport], Callable[..., ApiKeysTransport] ] = ( - type(self).get_transport_class(transport) + ApiKeysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApiKeysTransport], transport) ) diff --git a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json index 88554b7a7f43..3d8902cac4a6 100644 --- a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json +++ b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-keys", - "version": "0.5.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py index 94fbff52eb57..19a306f11630 100644 --- a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py +++ b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py @@ -1178,8 +1178,9 @@ def test_create_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_key(request) @@ -1231,26 +1232,28 @@ async def test_create_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key - ] = mock_object + ] = mock_rpc request = {} await client.create_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1616,22 +1619,23 @@ async def test_list_keys_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2184,22 +2188,23 @@ async def test_get_key_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key - ] = mock_object + ] = mock_rpc request = {} await client.get_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2552,22 +2557,23 @@ async def test_get_key_string_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key_string - ] = mock_object + ] = mock_rpc request = {} await client.get_key_string(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key_string(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2856,8 +2862,9 @@ def test_update_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_key(request) @@ -2909,26 +2916,28 @@ async def test_update_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_key - ] = mock_object + ] = mock_rpc request = {} await client.update_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3230,8 +3239,9 @@ def test_delete_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_key(request) @@ -3283,26 +3293,28 @@ async def test_delete_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3592,8 +3604,9 @@ def test_undelete_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_key(request) @@ -3647,26 +3660,28 @@ async def test_undelete_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_key - ] = mock_object + ] = mock_rpc request = {} await client.undelete_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3931,22 +3946,23 @@ async def test_lookup_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_key - ] = mock_object + ] = mock_rpc request = {} await client.lookup_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py index a76f610ddc4b..692a5b47d977 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient) - ) + get_transport_class = ConnectionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py index cf735e09880d..8b0713d63ed3 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py @@ -660,7 +660,7 @@ def __init__( Type[ConnectionServiceTransport], Callable[..., ConnectionServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConnectionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConnectionServiceTransport], transport) ) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py index 6824a486e07a..a9d118e68dcb 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -183,9 +182,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TetherClient).get_transport_class, type(TetherClient) - ) + get_transport_class = TetherClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py index 9be50b0e18c2..7b651a854222 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[TetherTransport], Callable[..., TetherTransport] ] = ( - type(self).get_transport_class(transport) + TetherClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TetherTransport], transport) ) diff --git a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json index fb979b656282..b8c1a4c55ebc 100644 --- a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json +++ b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-connect", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff 
--git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py index 0c846df35117..ee82ef2f59e9 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py @@ -1285,22 +1285,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py index 56abbe13b325..c3857955a069 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py @@ -1118,22 +1118,23 @@ async def test_egress_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.egress - ] = mock_object + ] = mock_rpc request = [{}] await client.egress(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.egress(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py index 02b228845902..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py index 02b228845902..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py index df2b9af6b019..b4c9eddd7e6a 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProvisioningClient).get_transport_class, type(ProvisioningClient) - ) + get_transport_class = ProvisioningClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py index 46523fab45a9..dc2a49426692 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py @@ -665,7 +665,7 @@ def __init__( transport_init: Union[ Type[ProvisioningTransport], Callable[..., ProvisioningTransport] ] = ( - type(self).get_transport_class(transport) + ProvisioningClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProvisioningTransport], transport) ) diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py index c562dc69e398..b9cb56512196 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RegistryClient).get_transport_class, type(RegistryClient) - ) + get_transport_class = RegistryClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py index 15e7bfbae605..210cea34e08a 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py @@ -762,7 +762,7 @@ def __init__( transport_init: Union[ Type[RegistryTransport], Callable[..., RegistryTransport] ] = ( - type(self).get_transport_class(transport) + RegistryClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegistryTransport], transport) ) diff --git a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json index a325ed82ef5c..399743edd3de 100644 --- a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json +++ b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-registry", - "version": "0.6.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py index f5ef900a382f..b0cb0a3aa8a3 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py @@ -1219,8 +1219,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -1274,26 +1275,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1604,8 +1607,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -1659,26 +1663,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2031,22 +2037,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py index 3f718f87e226..28ed860fab7b 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py @@ -1236,22 +1236,23 @@ async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_apis - ] = mock_object + ] = mock_rpc request = {} await client.list_apis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_apis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1807,22 +1808,23 @@ async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api - ] = mock_object + ] = mock_rpc request = {} await client.get_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2192,22 +2194,23 @@ async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api - ] = mock_object + ] = mock_rpc request = {} await client.create_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,22 +2594,23 @@ async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api - ] = mock_object + ] = mock_rpc request = {} await client.update_api(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2962,22 +2966,23 @@ async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api - ] = mock_object + ] = mock_rpc request = {} await client.delete_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3331,22 +3336,23 @@ async def test_list_api_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_api_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3917,22 +3923,23 @@ async def test_get_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_version - ] = mock_object + ] = mock_rpc request = {} await client.get_api_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4310,22 +4317,23 @@ async def test_create_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_version - ] = mock_object + ] = mock_rpc request = {} await client.create_api_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4728,22 +4736,23 @@ async def test_update_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_version - ] = mock_object + ] = mock_rpc request = {} await client.update_api_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5124,22 +5133,23 @@ async def test_delete_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5494,22 +5504,23 @@ async def test_list_api_specs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_specs - ] = mock_object + ] = mock_rpc request = {} await client.list_api_specs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_specs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6076,22 +6087,23 @@ async def test_get_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_api_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6472,22 +6484,23 @@ async def test_get_api_spec_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_spec_contents - ] = mock_object + ] = mock_rpc request = {} await client.get_api_spec_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_spec_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6875,22 +6888,23 @@ async def test_create_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.create_api_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7295,22 +7309,23 @@ async def test_update_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.update_api_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7678,22 +7693,23 @@ async def test_delete_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8072,22 +8088,23 @@ async def test_tag_api_spec_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tag_api_spec_revision - ] = mock_object + ] = mock_rpc request = {} await client.tag_api_spec_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.tag_api_spec_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8392,22 +8409,23 @@ async def test_list_api_spec_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_spec_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_api_spec_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_spec_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8919,22 +8937,23 @@ async def test_rollback_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.rollback_api_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9261,22 +9280,23 @@ async def test_delete_api_spec_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_spec_revision - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_spec_revision(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_spec_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9668,22 +9688,23 @@ async def test_list_api_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_api_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10281,22 +10302,23 @@ async def test_get_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_api_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10711,22 +10733,23 @@ async def test_create_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_api_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11155,22 +11178,23 @@ async def test_update_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_api_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11562,22 +11586,23 @@ async def test_delete_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11967,22 +11992,23 @@ async def test_tag_api_deployment_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tag_api_deployment_revision - ] = mock_object + ] = mock_rpc request = {} await client.tag_api_deployment_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.tag_api_deployment_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12287,22 +12313,23 @@ async def test_list_api_deployment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_deployment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_api_deployment_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_deployment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12819,22 +12846,23 @@ async def test_rollback_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.rollback_api_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13161,22 +13189,23 @@ async def test_delete_api_deployment_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_deployment_revision - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_deployment_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_deployment_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13556,22 +13585,23 @@ async def test_list_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.list_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14126,22 +14156,23 @@ async def test_get_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_artifact - ] = mock_object + ] = mock_rpc request = {} await client.get_artifact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14514,22 +14545,23 @@ async def test_get_artifact_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_artifact_contents - ] = mock_object + ] = mock_rpc request = {} await client.get_artifact_contents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_artifact_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14905,22 +14937,23 @@ async def test_create_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_artifact - ] = mock_object + ] = mock_rpc request = {} await client.create_artifact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15307,22 +15340,23 @@ async def test_replace_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_artifact - ] = mock_object + ] = mock_rpc request = {} await client.replace_artifact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15673,22 +15707,23 @@ async def test_delete_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_artifact - ] = mock_object + ] = mock_rpc request = {} await client.delete_artifact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/.flake8 b/packages/google-cloud-apihub/.flake8 index 32986c79287a..87f6e408c47d 100644 --- a/packages/google-cloud-apihub/.flake8 +++ b/packages/google-cloud-apihub/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2024 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-apihub/MANIFEST.in b/packages/google-cloud-apihub/MANIFEST.in index d6814cd60037..e0a66705318e 100644 --- a/packages/google-cloud-apihub/MANIFEST.in +++ b/packages/google-cloud-apihub/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2024 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-apihub/docs/conf.py b/packages/google-cloud-apihub/docs/conf.py index cdd94d02237a..939e0b6666a0 100644 --- a/packages/google-cloud-apihub/docs/conf.py +++ b/packages/google-cloud-apihub/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py index 33d37a7b677b..558c8aab67c5 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py index 33d37a7b677b..558c8aab67c5 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/async_client.py index f22d36159e07..9d1b494ddccf 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/async_client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApiHubClient).get_transport_class, type(ApiHubClient) - ) + get_transport_class = ApiHubClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py index 97ee83819cfb..69f73e4792ec 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py @@ -830,7 +830,7 @@ def __init__( transport_init: Union[ Type[ApiHubTransport], Callable[..., ApiHubTransport] ] = ( - type(self).get_transport_class(transport) + ApiHubClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApiHubTransport], transport) ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/async_client.py index 9efd7bd68591..42f5c884a8ec 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/async_client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,10 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApiHubDependenciesClient).get_transport_class, - type(ApiHubDependenciesClient), - ) + get_transport_class = ApiHubDependenciesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py index b0da1fa619d7..1c70a2416c8e 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py @@ -696,7 +696,7 @@ def __init__( Type[ApiHubDependenciesTransport], Callable[..., ApiHubDependenciesTransport], ] = ( - type(self).get_transport_class(transport) + ApiHubDependenciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApiHubDependenciesTransport], transport) ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/async_client.py index d2493edf4869..6f83920a990a 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/async_client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApiHubPluginClient).get_transport_class, type(ApiHubPluginClient) - ) + get_transport_class = ApiHubPluginClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py index 7303831a85a9..5f6283c74876 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py @@ -679,7 +679,7 @@ def __init__( transport_init: Union[ Type[ApiHubPluginTransport], Callable[..., ApiHubPluginTransport] ] = ( - type(self).get_transport_class(transport) + ApiHubPluginClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApiHubPluginTransport], transport) ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/async_client.py index c8f7cee1bcc8..be906eaa63df 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/async_client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,10 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(HostProjectRegistrationServiceClient).get_transport_class, - type(HostProjectRegistrationServiceClient), - ) + get_transport_class = HostProjectRegistrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py index 8a844b352b26..89784ae2dd03 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py @@ -686,7 +686,7 @@ def __init__( Type[HostProjectRegistrationServiceTransport], Callable[..., HostProjectRegistrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + HostProjectRegistrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., HostProjectRegistrationServiceTransport], transport diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/async_client.py index bc0b3555dc0d..e0a1abb6ea97 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/async_client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LintingServiceClient).get_transport_class, type(LintingServiceClient) - ) + get_transport_class = LintingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py index 25f2c75bba43..75930326f9d9 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py @@ -689,7 +689,7 @@ def __init__( transport_init: Union[ Type[LintingServiceTransport], Callable[..., LintingServiceTransport] ] = ( - type(self).get_transport_class(transport) + LintingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LintingServiceTransport], transport) ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/async_client.py index b1031f73f4a8..5f52e6bf1413 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/async_client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProvisioningClient).get_transport_class, type(ProvisioningClient) - ) + get_transport_class = ProvisioningClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py index a0a5d2469846..c39738ccb878 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py @@ -662,7 +662,7 @@ def __init__( transport_init: Union[ Type[ProvisioningTransport], Callable[..., ProvisioningTransport] ] = ( - type(self).get_transport_class(transport) + ProvisioningClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProvisioningTransport], transport) ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/async_client.py index 6b37168f6242..aa2a3acd7dd3 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/async_client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,10 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RuntimeProjectAttachmentServiceClient).get_transport_class, - type(RuntimeProjectAttachmentServiceClient), - ) + get_transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py index fd5c42592759..121ce0bf5470 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py @@ -686,7 +686,7 @@ def __init__( Type[RuntimeProjectAttachmentServiceTransport], Callable[..., RuntimeProjectAttachmentServiceTransport], ] = ( - type(self).get_transport_class(transport) + RuntimeProjectAttachmentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RuntimeProjectAttachmentServiceTransport], transport diff --git a/packages/google-cloud-apihub/noxfile.py b/packages/google-cloud-apihub/noxfile.py index aeee7851401a..67b7265f7586 100644 --- a/packages/google-cloud-apihub/noxfile.py +++ b/packages/google-cloud-apihub/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2024 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-apihub/scripts/decrypt-secrets.sh b/packages/google-cloud-apihub/scripts/decrypt-secrets.sh index 120b0ddc4364..0018b421ddf8 100755 --- a/packages/google-cloud-apihub/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-apihub/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2024 Google LLC All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py index 140744493eac..aaec1e3055f6 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py @@ -1229,22 +1229,23 @@ async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api - ] = mock_object + ] = mock_rpc request = {} await client.create_api(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1627,22 +1628,23 @@ async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api - ] = mock_object + ] = mock_rpc request = {} await client.get_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1997,22 +1999,23 @@ async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_apis - ] = mock_object + ] = mock_rpc request = {} await client.list_apis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_apis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2561,22 +2564,23 @@ async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api - ] = mock_object + ] = mock_rpc request = {} await client.update_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2930,22 +2934,23 @@ async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api - ] = mock_object + ] = mock_rpc request = {} await client.delete_api(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3308,22 +3313,23 @@ async def test_create_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_version - ] = mock_object + ] = mock_rpc request = {} await client.create_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3727,22 +3733,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4109,22 +4116,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4684,22 +4692,23 @@ async def test_update_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_version - ] = mock_object + ] = mock_rpc request = {} await client.update_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5065,22 +5074,23 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5431,22 +5441,23 @@ async def test_create_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_spec - ] = mock_object + ] = mock_rpc request = {} await client.create_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5824,22 +5835,23 @@ async def test_get_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6203,22 +6215,23 @@ async def test_get_spec_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_spec_contents - ] = mock_object + ] = mock_rpc request = {} await client.get_spec_contents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_spec_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6581,22 +6594,23 @@ async def test_list_specs_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_specs - ] = mock_object + ] = mock_rpc request = {} await client.list_specs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_specs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7144,22 +7158,23 @@ async def test_update_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_spec - ] = mock_object + ] = mock_rpc request = {} await client.update_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7513,22 +7528,23 @@ async def test_delete_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_spec - ] = mock_object + ] = mock_rpc request = {} await client.delete_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7881,22 +7897,23 @@ async def test_get_api_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_operation - ] = mock_object + ] = mock_rpc request = {} await client.get_api_operation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8273,22 +8290,23 @@ async def test_list_api_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_api_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8856,22 +8874,23 @@ async def test_get_definition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_definition - ] = mock_object + ] = mock_rpc request = {} await client.get_definition(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_definition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9251,22 +9270,23 @@ async def test_create_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9670,22 +9690,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10050,22 +10071,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10629,22 +10651,23 @@ async def test_update_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11026,22 +11049,23 @@ async def test_delete_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_deployment - ] = mock_object + ] = mock_rpc request = {} await client.delete_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11419,22 +11443,23 @@ async def test_create_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_attribute - ] = mock_object + ] = mock_rpc request = {} await client.create_attribute(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11844,22 +11869,23 @@ async def test_get_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attribute - ] = mock_object + ] = mock_rpc request = {} await client.get_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12247,22 +12273,23 @@ async def test_update_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attribute - ] = mock_object + ] = mock_rpc request = {} await client.update_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12633,22 +12660,23 @@ async def test_delete_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_attribute - ] = mock_object + ] = mock_rpc request = {} await client.delete_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12992,22 +13020,23 @@ async def test_list_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attributes - ] = mock_object + ] = mock_rpc request = {} await client.list_attributes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13558,22 +13587,23 @@ async def test_search_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_resources - ] = mock_object + ] = mock_rpc request = {} await client.search_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14152,22 +14182,23 @@ async def test_create_external_api_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_external_api - ] = mock_object + ] = mock_rpc request = {} await client.create_external_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_external_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14569,22 +14600,23 @@ async def test_get_external_api_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_external_api - ] = mock_object + ] = mock_rpc request = {} await client.get_external_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_external_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14961,22 +14993,23 @@ async def test_update_external_api_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_external_api - ] = mock_object + ] = mock_rpc request = {} await client.update_external_api(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_external_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15359,22 +15392,23 @@ async def test_delete_external_api_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_external_api - ] = mock_object + ] = mock_rpc request = {} await client.delete_external_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_external_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15739,22 +15773,23 @@ async def test_list_external_apis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_external_apis - ] = mock_object + ] = mock_rpc request = {} await client.list_external_apis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_external_apis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py index c45b9f41dbed..bf0a3da3c9e3 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py @@ -1348,22 +1348,23 @@ async def test_create_dependency_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dependency - ] = mock_object + ] = mock_rpc request = {} await client.create_dependency(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dependency(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1757,22 +1758,23 @@ async def test_get_dependency_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dependency - ] = mock_object + ] = mock_rpc request = {} await client.get_dependency(request) # Establish that the underlying gRPC stub method was called. 
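Note on the hunks above: across these apihub test modules the change is mechanical. The local variable mock_object is renamed to mock_rpc, and the mock now gets an explicit mock_rpc.return_value = mock.Mock() so that awaiting it yields an ordinary synchronous mock (the diff does not state the motivation for that line). Below is a minimal, self-contained sketch of the pattern these *_async_use_cached_wrapped_rpc tests exercise; _FakeTransport, call_get_spec, and the stub attribute are illustrative stand-ins, not names from the generated clients.

import asyncio
from unittest import mock

class _FakeTransport:
    # Illustrative stand-in for a GAPIC transport: wrapped RPCs are cached in a
    # dict keyed by the underlying stub method.
    def __init__(self):
        self.get_spec = object()          # placeholder for the gRPC stub method
        self._wrapped_methods = {}

async def call_get_spec(transport, request):
    # Mirrors what the async client does: look up the cached wrapper and await it.
    return await transport._wrapped_methods[transport.get_spec](request)

async def main():
    transport = _FakeTransport()

    # Replace the cached wrapped function with a mock, as the updated tests do.
    # The explicit Mock() return value makes the awaited result synchronous.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    transport._wrapped_methods[transport.get_spec] = mock_rpc

    await call_get_spec(transport, {})
    assert mock_rpc.call_count == 1       # first call hits the cached wrapper

    await call_get_spec(transport, {})
    assert mock_rpc.call_count == 2       # second call reuses the same entry

asyncio.run(main())

The assertion pair (one call, then two calls on the same cached entry) is what every such test in these hunks boils down to.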
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dependency(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2142,22 +2144,23 @@ async def test_update_dependency_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dependency - ] = mock_object + ] = mock_rpc request = {} await client.update_dependency(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dependency(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2535,22 +2538,23 @@ async def test_delete_dependency_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dependency - ] = mock_object + ] = mock_rpc request = {} await client.delete_dependency(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_dependency(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2914,22 +2918,23 @@ async def test_list_dependencies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_dependencies - ] = mock_object + ] = mock_rpc request = {} await client.list_dependencies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_dependencies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py index 1f596b7c787d..97adab5ecf39 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py @@ -1266,22 +1266,23 @@ async def test_get_plugin_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_plugin - ] = mock_object + ] = mock_rpc request = {} await client.get_plugin(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_plugin(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1645,22 +1646,23 @@ async def test_enable_plugin_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_plugin - ] = mock_object + ] = mock_rpc request = {} await client.enable_plugin(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enable_plugin(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2024,22 +2026,23 @@ async def test_disable_plugin_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_plugin - ] = mock_object + ] = mock_rpc request = {} await client.disable_plugin(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.disable_plugin(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py index 76d5a3c129b9..e951e616c819 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py @@ -1411,22 +1411,23 @@ async def test_create_host_project_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_host_project_registration - ] = mock_object + ] = mock_rpc request = {} await client.create_host_project_registration(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_host_project_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1850,22 +1851,23 @@ async def test_get_host_project_registration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_host_project_registration - ] = mock_object + ] = mock_rpc request = {} await client.get_host_project_registration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_host_project_registration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2260,22 +2262,23 @@ async def test_list_host_project_registrations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_host_project_registrations - ] = mock_object + ] = mock_rpc request = {} await client.list_host_project_registrations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_host_project_registrations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py index 8375765971ff..947a54eb86f7 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py @@ -1286,22 +1286,23 @@ async def test_get_style_guide_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_style_guide - ] = mock_object + ] = mock_rpc request = {} await client.get_style_guide(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_style_guide(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1663,22 +1664,23 @@ async def test_update_style_guide_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_style_guide - ] = mock_object + ] = mock_rpc request = {} await client.update_style_guide(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_style_guide(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2066,22 +2068,23 @@ async def test_get_style_guide_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_style_guide_contents - ] = mock_object + ] = mock_rpc request = {} await client.get_style_guide_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_style_guide_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2434,22 +2437,23 @@ async def test_lint_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lint_spec - ] = mock_object + ] = mock_rpc request = {} await client.lint_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lint_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py index ae413bf71cd8..f3cf6227c307 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py @@ -1226,8 +1226,9 @@ def test_create_api_hub_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api_hub_instance(request) @@ -1283,26 +1284,28 @@ async def test_create_api_hub_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_hub_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_api_hub_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_api_hub_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1700,22 +1703,23 @@ async def test_get_api_hub_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_hub_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_api_hub_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_hub_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2089,22 +2093,23 @@ async def test_lookup_api_hub_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_api_hub_instance - ] = mock_object + ] = mock_rpc request = {} await client.lookup_api_hub_instance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_api_hub_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py index 0bcf0a91a196..f56c0e63d5f7 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py @@ -1418,22 +1418,23 @@ async def test_create_runtime_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_runtime_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.create_runtime_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_runtime_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1865,22 +1866,23 @@ async def test_get_runtime_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_runtime_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.get_runtime_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_runtime_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2283,22 +2285,23 @@ async def test_list_runtime_project_attachments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_runtime_project_attachments - ] = mock_object + ] = mock_rpc request = {} await client.list_runtime_project_attachments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_runtime_project_attachments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2892,22 +2895,23 @@ async def test_delete_runtime_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_runtime_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.delete_runtime_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_runtime_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3283,22 +3287,23 @@ async def test_lookup_runtime_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_runtime_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.lookup_runtime_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_runtime_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py index 9a08c3a23844..2cc4ae19de5c 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApplicationsClient).get_transport_class, type(ApplicationsClient) - ) + get_transport_class = ApplicationsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py index d1c87cf27d25..5010433228ac 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py @@ -637,7 +637,7 @@ def __init__( transport_init: Union[ Type[ApplicationsTransport], Callable[..., ApplicationsTransport] ] = ( - type(self).get_transport_class(transport) + ApplicationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApplicationsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py index c7e0646785a9..f50144f9ebd2 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,10 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AuthorizedCertificatesClient).get_transport_class, - type(AuthorizedCertificatesClient), - ) + get_transport_class = AuthorizedCertificatesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py index fe3bbf2688d3..e2633508c5c4 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py @@ -650,7 +650,7 @@ def __init__( Type[AuthorizedCertificatesTransport], Callable[..., AuthorizedCertificatesTransport], ] = ( - type(self).get_transport_class(transport) + AuthorizedCertificatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AuthorizedCertificatesTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py index dea523dd80a7..4f6356077aaf 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AuthorizedDomainsClient).get_transport_class, type(AuthorizedDomainsClient) - ) + get_transport_class = AuthorizedDomainsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py index 616b77ae1760..64df5a7c08c9 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py @@ -646,7 +646,7 @@ def __init__( Type[AuthorizedDomainsTransport], Callable[..., AuthorizedDomainsTransport], ] = ( - type(self).get_transport_class(transport) + AuthorizedDomainsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AuthorizedDomainsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py index 881137a28182..59539a765ab9 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DomainMappingsClient).get_transport_class, type(DomainMappingsClient) - ) + get_transport_class = DomainMappingsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py index c8666634d5da..cfda1bd8fedb 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py @@ -643,7 +643,7 @@ def __init__( transport_init: Union[ Type[DomainMappingsTransport], Callable[..., DomainMappingsTransport] ] = ( - type(self).get_transport_class(transport) + DomainMappingsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DomainMappingsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py index da32b03138fa..399bca77546a 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FirewallClient).get_transport_class, type(FirewallClient) - ) + get_transport_class = FirewallClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py index d8551dd7bd6f..ac94c119d5e5 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py @@ -645,7 +645,7 @@ def __init__( transport_init: Union[ Type[FirewallTransport], Callable[..., FirewallTransport] ] = ( - type(self).get_transport_class(transport) + FirewallClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FirewallTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py index b7706a13ac9e..2969b655e347 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(InstancesClient).get_transport_class, type(InstancesClient) - ) + get_transport_class = InstancesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py index 9fb962838850..cb551f26c8f2 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[InstancesTransport], Callable[..., InstancesTransport] ] = ( - type(self).get_transport_class(transport) + InstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstancesTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py index 2bd1de1db05d..6c1c6ff348b1 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -183,9 +182,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServicesClient).get_transport_class, type(ServicesClient) - ) + get_transport_class = ServicesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py index 7be00be82215..a4440b5042fe 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[ServicesTransport], Callable[..., ServicesTransport] ] = ( - type(self).get_transport_class(transport) + ServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServicesTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py index c60e9d7b90b8..37a9eafb9ce8 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
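Note on the appengine_admin_v1 async client hunks above: the generated idiom get_transport_class = functools.partial(type(FooClient).get_transport_class, type(FooClient)) is replaced by plain attribute access (get_transport_class = FooClient.get_transport_class), and the sync clients switch type(self).get_transport_class(transport) to FooClient.get_transport_class(transport). In the generated code the transport registry lives on the client metaclass, so both spellings read the same mapping. The sketch below demonstrates the equivalence with a toy metaclass and registry; all names and transport strings here are illustrative, not the real appengine_admin classes.

from collections import OrderedDict
import functools

class _ClientMeta(type):
    # Toy stand-in for a GAPIC client metaclass: the transport registry lives
    # on the metaclass and get_transport_class reads it through cls.
    _transport_registry = OrderedDict(grpc="GrpcTransport", rest="RestTransport")

    def get_transport_class(cls, label=None):
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))

class ApplicationsClient(metaclass=_ClientMeta):
    pass

# Old generated form: a partial over the metaclass method, binding the metaclass.
old_style = functools.partial(
    type(ApplicationsClient).get_transport_class, type(ApplicationsClient)
)
# New generated form: attribute access yields a method bound to the class itself.
new_style = ApplicationsClient.get_transport_class

assert old_style("rest") == new_style("rest") == "RestTransport"
assert old_style() == new_style() == "GrpcTransport"

The new spelling is shorter, drops the functools import, and reads as a normal classmethod-style call; behaviour for the generated clients themselves appears unchanged.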
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VersionsClient).get_transport_class, type(VersionsClient) - ) + get_transport_class = VersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py index 7ccb087f84db..20dfb5b039de 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[VersionsTransport], Callable[..., VersionsTransport] ] = ( - type(self).get_transport_class(transport) + VersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VersionsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json index e8154861b9d3..fb8cbf58355c 100644 --- a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json +++ b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-appengine-admin", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py index 685ca369d35e..bc6900b9d664 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py @@ -1306,22 +1306,23 @@ async def test_get_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_application - ] = mock_object + ] = mock_rpc request = {} await client.get_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1642,8 +1643,9 @@ def test_create_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application(request) @@ -1699,26 +1701,28 @@ async def test_create_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application - ] = mock_object + ] = mock_rpc request = {} await client.create_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1877,8 +1881,9 @@ def test_update_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application(request) @@ -1934,26 +1939,28 @@ async def test_update_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application - ] = mock_object + ] = mock_rpc request = {} await client.update_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2177,8 +2184,9 @@ def test_repair_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.repair_application(request) @@ -2234,26 +2242,28 @@ async def test_repair_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.repair_application - ] = mock_object + ] = mock_rpc request = {} await client.repair_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.repair_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py index 43dd20d14a37..4719dc399f1d 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py @@ -1372,22 +1372,23 @@ async def test_list_authorized_certificates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_authorized_certificates - ] = mock_object + ] = mock_rpc request = {} await client.list_authorized_certificates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_authorized_certificates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1893,22 +1894,23 @@ async def test_get_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.get_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. 
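Note on the long-running-operation tests above (create_application, update_application, repair_application, and create_api_hub_instance earlier): besides the mock_rpc rename, the reworded comment spells out what wrapper_fn is being used to prove — the transport's operations client is built once, on the first RPC, and reused afterwards, which is why these tests reset wrapper_fn and assert call_count == 0 on the second call. A minimal sketch of that lazy-caching shape, using hypothetical names (_get_operations_client, the factory argument) rather than the real transport internals:

from unittest import mock

class _FakeTransport:
    # Hypothetical lazy cache: a factory builds the operations client the first
    # time an operation method needs it; later calls reuse the cached instance.
    def __init__(self, factory):
        self._factory = factory
        self._operations_client = None

    def _get_operations_client(self):
        if self._operations_client is None:
            self._operations_client = self._factory()
        return self._operations_client

wrapper_fn = mock.Mock(return_value=object())
transport = _FakeTransport(wrapper_fn)

transport._get_operations_client()        # first "RPC": the factory runs once
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()                   # same trick the generated tests use
transport._get_operations_client()        # second "RPC": cached client reused
assert wrapper_fn.call_count == 0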
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2220,22 +2222,23 @@ async def test_create_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.create_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2547,22 +2550,23 @@ async def test_update_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.update_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2852,22 +2856,23 @@ async def test_delete_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.delete_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py index 8ffd7e44e2bd..851b967ad202 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py @@ -1338,22 +1338,23 @@ async def test_list_authorized_domains_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_authorized_domains - ] = mock_object + ] = mock_rpc request = {} await client.list_authorized_domains(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_authorized_domains(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py index 71fcf41de417..8deb1e04cb08 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py @@ -1312,22 +1312,23 @@ async def test_list_domain_mappings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_domain_mappings - ] = mock_object + ] = mock_rpc request = {} await client.list_domain_mappings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_domain_mappings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1817,22 +1818,23 @@ async def test_get_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.get_domain_mapping(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2062,8 +2064,9 @@ def test_create_domain_mapping_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_domain_mapping(request) @@ -2119,26 +2122,28 @@ async def test_create_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.create_domain_mapping(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2363,8 +2368,9 @@ def test_update_domain_mapping_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_domain_mapping(request) @@ -2420,26 +2426,28 @@ async def test_update_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.update_domain_mapping(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2664,8 +2672,9 @@ def test_delete_domain_mapping_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_domain_mapping(request) @@ -2721,26 +2730,28 @@ async def test_delete_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.delete_domain_mapping(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py index 79384416507a..ebe3a04e982c 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py @@ -1246,22 +1246,23 @@ async def test_list_ingress_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_ingress_rules - ] = mock_object + ] = mock_rpc request = {} await client.list_ingress_rules(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_ingress_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1744,22 +1745,23 @@ async def test_batch_update_ingress_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_ingress_rules - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_ingress_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_ingress_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2051,22 +2053,23 @@ async def test_create_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.create_ingress_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2356,22 +2359,23 @@ async def test_get_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.get_ingress_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2665,22 +2669,23 @@ async def test_update_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.update_ingress_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2964,22 +2969,23 @@ async def test_delete_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.delete_ingress_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py index c93e7998d96d..e0d073e87cb2 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py @@ -1255,22 +1255,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1776,22 +1777,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2030,8 +2032,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -2085,26 +2088,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2314,8 +2319,9 @@ def test_debug_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.debug_instance(request) @@ -2369,26 +2375,28 @@ async def test_debug_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.debug_instance - ] = mock_object + ] = mock_rpc request = {} await client.debug_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.debug_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py index d3508563638b..12288ce8f9bd 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py @@ -1245,22 +1245,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1724,22 +1725,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1950,8 +1952,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2005,26 +2008,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2232,8 +2237,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -2287,26 +2293,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py index bc4c34625da7..556d1b46336c 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py @@ -1247,22 +1247,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1779,22 +1780,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2041,8 +2043,9 @@ def test_create_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_version(request) @@ -2096,26 +2099,28 @@ async def test_create_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_version - ] = mock_object + ] = mock_rpc request = {} await client.create_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2323,8 +2328,9 @@ def test_update_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_version(request) @@ -2378,26 +2384,28 @@ async def test_update_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_version - ] = mock_object + ] = mock_rpc request = {} await client.update_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2605,8 +2613,9 @@ def test_delete_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_version(request) @@ -2660,26 +2669,28 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From a721de7293d457d4e9e88b62e8a8237d089f419f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 15:25:55 +0000 Subject: [PATCH 020/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#13003) - [ ] Regenerate this pull request now. 
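The test hunks above all apply one generated pattern: the cached wrapped RPC is swapped for a `mock.AsyncMock()` named `mock_rpc` (previously `mock_object`) whose awaited result is an explicit synchronous `mock.Mock()`, and the call count is asserted across two invocations to prove the cached wrapper is reused rather than rebuilt. The sketch below is a minimal, self-contained illustration of that pattern only; `FakeTransport`, `FakeAsyncClient`, and `create_thing` are hypothetical stand-ins, not classes or RPCs from these packages, and the real generated tests patch `client._client._transport._wrapped_methods` on a full GAPIC client instead.

```python
# Hypothetical sketch of the generated "use cached wrapped rpc" async test
# pattern; FakeTransport / FakeAsyncClient / create_thing are illustrative
# names only, not part of the google-cloud-* packages in this PR.
import asyncio
from unittest import mock


class FakeTransport:
    def __init__(self):
        # Generated transports cache one wrapped callable per RPC here.
        self._wrapped_methods = {self.create_thing: self.create_thing}

    def create_thing(self, request):
        raise NotImplementedError("replaced by a mock in the test")


class FakeAsyncClient:
    def __init__(self, transport):
        self._transport = transport

    async def create_thing(self, request):
        # Reuse the cached wrapper instead of re-wrapping on every call.
        rpc = self._transport._wrapped_methods[self._transport.create_thing]
        return await rpc(request)


async def main():
    client = FakeAsyncClient(FakeTransport())

    # Replace the cached wrapped function with a mock, as the hunks above do:
    # an AsyncMock whose awaited result is an explicit synchronous Mock.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    client._transport._wrapped_methods[client._transport.create_thing] = mock_rpc

    await client.create_thing(request={})
    assert mock_rpc.call_count == 1

    # A second call must hit the same cached mock, not a freshly built wrapper.
    await client.create_thing(request={})
    assert mock_rpc.call_count == 2


asyncio.run(main())
```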
PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRyYWNlLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRyYW5zbGF0ZS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZpZGVvLWxpdmUtc3RyZWFtLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZpZGVvLXN0aXRjaGVyLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZpZGVvLXRyYW5zY29kZXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZpZGVvaW50ZWxsaWdlbmNlLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZpc2lvbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZpc2lvbmFpLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZtLW1pZ3JhdGlvbi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZtd2FyZWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZwYy1hY2Nlc3MvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXdlYnJpc2svLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXdlYnNlY3VyaXR5c2Nhbm5lci8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXdvcmtmbG93cy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXdvcmtzdGF0aW9ucy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWdlby10eXBlLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtYWRkcmVzc3ZhbGlkYXRpb24vLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtZmxlZXRlbmdpbmUtZGVsaXZlcnkvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtZmxlZXRlbmdpbmUvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtbWFwc3BsYXRmb3JtZGF0YXNldHMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/trace/gapic_version.py | 2 +- .../google/cloud/trace_v1/gapic_version.py | 2 +- .../services/trace_service/async_client.py | 5 +- .../trace_v1/services/trace_service/client.py | 2 +- .../google/cloud/trace_v2/gapic_version.py | 2 +- 
.../services/trace_service/async_client.py | 5 +- .../trace_v2/services/trace_service/client.py | 2 +- ...etadata_google.devtools.cloudtrace.v1.json | 2 +- ...etadata_google.devtools.cloudtrace.v2.json | 2 +- .../unit/gapic/trace_v1/test_trace_service.py | 27 +- .../unit/gapic/trace_v2/test_trace_service.py | 18 +- .../cloud/video/live_stream/gapic_version.py | 2 +- .../video/live_stream_v1/gapic_version.py | 2 +- .../livestream_service/async_client.py | 5 +- .../services/livestream_service/client.py | 2 +- ...data_google.cloud.video.livestream.v1.json | 2 +- .../live_stream_v1/test_livestream_service.py | 364 +++--- .../cloud/video/stitcher/gapic_version.py | 2 +- .../cloud/video/stitcher_v1/gapic_version.py | 2 +- .../video_stitcher_service/async_client.py | 6 +- .../services/video_stitcher_service/client.py | 2 +- ...tadata_google.cloud.video.stitcher.v1.json | 2 +- .../test_video_stitcher_service.py | 390 +++--- .../cloud/video/transcoder/gapic_version.py | 2 +- .../video/transcoder_v1/gapic_version.py | 2 +- .../transcoder_service/async_client.py | 5 +- .../services/transcoder_service/client.py | 2 +- ...data_google.cloud.video.transcoder.v1.json | 2 +- .../transcoder_v1/test_transcoder_service.py | 72 +- .../cloud/videointelligence/gapic_version.py | 2 +- .../videointelligence_v1/gapic_version.py | 2 +- .../async_client.py | 6 +- .../video_intelligence_service/client.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../video_intelligence_service/client.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../video_intelligence_service/client.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../video_intelligence_service/client.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../async_client.py | 6 +- .../video_intelligence_service/client.py | 2 +- ...ata_google.cloud.videointelligence.v1.json | 2 +- ...oogle.cloud.videointelligence.v1beta2.json | 2 +- ...gle.cloud.videointelligence.v1p1beta1.json | 2 +- ...gle.cloud.videointelligence.v1p2beta1.json | 2 +- ...gle.cloud.videointelligence.v1p3beta1.json | 2 +- .../test_video_intelligence_service.py | 19 +- .../test_video_intelligence_service.py | 19 +- .../test_video_intelligence_service.py | 19 +- .../test_video_intelligence_service.py | 19 +- ...st_streaming_video_intelligence_service.py | 9 +- .../test_video_intelligence_service.py | 19 +- .../google/cloud/vision/gapic_version.py | 2 +- .../google/cloud/vision_v1/gapic_version.py | 2 +- .../services/image_annotator/async_client.py | 5 +- .../services/image_annotator/client.py | 2 +- .../services/product_search/async_client.py | 5 +- .../services/product_search/client.py | 2 +- .../cloud/vision_v1p1beta1/gapic_version.py | 2 +- .../services/image_annotator/async_client.py | 5 +- .../services/image_annotator/client.py | 2 +- .../cloud/vision_v1p2beta1/gapic_version.py | 2 +- .../services/image_annotator/async_client.py | 5 +- .../services/image_annotator/client.py | 2 +- .../cloud/vision_v1p3beta1/gapic_version.py | 2 +- .../services/image_annotator/async_client.py | 5 +- .../services/image_annotator/client.py | 2 +- .../services/product_search/async_client.py | 5 +- .../services/product_search/client.py | 2 +- .../cloud/vision_v1p4beta1/gapic_version.py | 2 +- .../services/image_annotator/async_client.py | 5 +- .../services/image_annotator/client.py | 2 +- .../services/product_search/async_client.py | 5 +- .../services/product_search/client.py | 2 +- ...ippet_metadata_google.cloud.vision.v1.json | 2 +- 
...etadata_google.cloud.vision.v1p1beta1.json | 2 +- ...etadata_google.cloud.vision.v1p2beta1.json | 2 +- ...etadata_google.cloud.vision.v1p3beta1.json | 2 +- ...etadata_google.cloud.vision.v1p4beta1.json | 2 +- .../gapic/vision_v1/test_image_annotator.py | 56 +- .../gapic/vision_v1/test_product_search.py | 191 +-- .../vision_v1p1beta1/test_image_annotator.py | 9 +- .../vision_v1p2beta1/test_image_annotator.py | 28 +- .../vision_v1p3beta1/test_image_annotator.py | 28 +- .../vision_v1p3beta1/test_product_search.py | 172 +-- .../vision_v1p4beta1/test_image_annotator.py | 56 +- .../vision_v1p4beta1/test_product_search.py | 191 +-- .../google/cloud/visionai/gapic_version.py | 2 +- .../google/cloud/visionai_v1/gapic_version.py | 2 +- .../services/app_platform/async_client.py | 5 +- .../services/app_platform/client.py | 2 +- .../health_check_service/async_client.py | 6 +- .../services/health_check_service/client.py | 2 +- .../live_video_analytics/async_client.py | 6 +- .../services/live_video_analytics/client.py | 2 +- .../streaming_service/async_client.py | 5 +- .../services/streaming_service/client.py | 2 +- .../services/streams_service/async_client.py | 5 +- .../services/streams_service/client.py | 2 +- .../services/warehouse/async_client.py | 5 +- .../visionai_v1/services/warehouse/client.py | 2 +- .../cloud/visionai_v1/types/platform.py | 4 +- .../cloud/visionai_v1alpha1/gapic_version.py | 2 +- .../services/app_platform/async_client.py | 5 +- .../services/app_platform/client.py | 2 +- .../live_video_analytics/async_client.py | 6 +- .../services/live_video_analytics/client.py | 2 +- .../streaming_service/async_client.py | 5 +- .../services/streaming_service/client.py | 2 +- .../services/streams_service/async_client.py | 5 +- .../services/streams_service/client.py | 2 +- .../services/warehouse/async_client.py | 5 +- .../services/warehouse/client.py | 2 +- .../cloud/visionai_v1alpha1/types/platform.py | 15 +- ...pet_metadata_google.cloud.visionai.v1.json | 2 +- ...tadata_google.cloud.visionai.v1alpha1.json | 2 +- .../gapic/visionai_v1/test_app_platform.py | 404 ++++--- .../visionai_v1/test_health_check_service.py | 9 +- .../visionai_v1/test_live_video_analytics.py | 262 ++-- .../visionai_v1/test_streaming_service.py | 54 +- .../gapic/visionai_v1/test_streams_service.py | 347 +++--- .../unit/gapic/visionai_v1/test_warehouse.py | 747 +++++++----- .../visionai_v1alpha1/test_app_platform.py | 404 ++++--- .../test_live_video_analytics.py | 75 +- .../test_streaming_service.py | 54 +- .../visionai_v1alpha1/test_streams_service.py | 328 ++--- .../gapic/visionai_v1alpha1/test_warehouse.py | 281 +++-- .../google/cloud/vmmigration/gapic_version.py | 2 +- .../cloud/vmmigration_v1/gapic_version.py | 2 +- .../services/vm_migration/async_client.py | 5 +- .../services/vm_migration/client.py | 2 +- ..._metadata_google.cloud.vmmigration.v1.json | 2 +- .../gapic/vmmigration_v1/test_vm_migration.py | 684 ++++++----- .../cloud/vmwareengine/gapic_version.py | 2 +- .../cloud/vmwareengine_v1/gapic_version.py | 2 +- .../services/vmware_engine/async_client.py | 5 +- .../services/vmware_engine/client.py | 2 +- ...metadata_google.cloud.vmwareengine.v1.json | 2 +- .../vmwareengine_v1/test_vmware_engine.py | 1056 ++++++++++------- .../google/cloud/vpcaccess/gapic_version.py | 2 +- .../cloud/vpcaccess_v1/gapic_version.py | 2 +- .../vpc_access_service/async_client.py | 5 +- .../services/vpc_access_service/client.py | 2 +- ...et_metadata_google.cloud.vpcaccess.v1.json | 2 +- .../vpcaccess_v1/test_vpc_access_service.py | 56 +- 
.../google/cloud/webrisk/gapic_version.py | 2 +- .../google/cloud/webrisk_v1/gapic_version.py | 2 +- .../services/web_risk_service/async_client.py | 5 +- .../services/web_risk_service/client.py | 2 +- .../cloud/webrisk_v1beta1/gapic_version.py | 2 +- .../web_risk_service_v1_beta1/async_client.py | 6 +- .../web_risk_service_v1_beta1/client.py | 2 +- ...ppet_metadata_google.cloud.webrisk.v1.json | 2 +- ...metadata_google.cloud.webrisk.v1beta1.json | 2 +- .../gapic/webrisk_v1/test_web_risk_service.py | 55 +- .../test_web_risk_service_v1_beta1.py | 27 +- .../cloud/websecurityscanner/gapic_version.py | 2 +- .../websecurityscanner_v1/gapic_version.py | 2 +- .../web_security_scanner/async_client.py | 6 +- .../services/web_security_scanner/client.py | 2 +- .../gapic_version.py | 2 +- .../web_security_scanner/async_client.py | 6 +- .../services/web_security_scanner/client.py | 2 +- .../gapic_version.py | 2 +- .../web_security_scanner/async_client.py | 6 +- .../services/web_security_scanner/client.py | 2 +- ...ta_google.cloud.websecurityscanner.v1.json | 2 +- ...ogle.cloud.websecurityscanner.v1alpha.json | 2 +- ...oogle.cloud.websecurityscanner.v1beta.json | 2 +- .../test_web_security_scanner.py | 117 +- .../test_web_security_scanner.py | 117 +- .../test_web_security_scanner.py | 117 +- .../google-cloud-workflows/docs/index.rst | 17 - .../workflows/executions/gapic_version.py | 2 +- .../workflows/executions_v1/gapic_version.py | 2 +- .../services/executions/async_client.py | 5 +- .../services/executions/client.py | 2 +- .../executions_v1beta/gapic_version.py | 2 +- .../services/executions/async_client.py | 5 +- .../services/executions/client.py | 2 +- .../google/cloud/workflows/gapic_version.py | 2 +- .../cloud/workflows_v1/gapic_version.py | 2 +- .../services/workflows/async_client.py | 5 +- .../workflows_v1/services/workflows/client.py | 2 +- .../cloud/workflows_v1beta/gapic_version.py | 2 +- .../services/workflows/async_client.py | 5 +- .../services/workflows/client.py | 2 +- ..._google.cloud.workflows.executions.v1.json | 2 +- ...gle.cloud.workflows.executions.v1beta.json | 2 +- ...et_metadata_google.cloud.workflows.v1.json | 2 +- ...etadata_google.cloud.workflows.v1beta.json | 2 +- .../gapic/executions_v1/test_executions.py | 36 +- .../executions_v1beta/test_executions.py | 36 +- .../unit/gapic/workflows_v1/test_workflows.py | 75 +- .../gapic/workflows_v1beta/test_workflows.py | 75 +- .../cloud/workstations/gapic_version.py | 2 +- .../cloud/workstations_v1/gapic_version.py | 2 +- .../services/workstations/async_client.py | 5 +- .../services/workstations/client.py | 2 +- .../workstations_v1beta/gapic_version.py | 2 +- .../services/workstations/async_client.py | 5 +- .../services/workstations/client.py | 2 +- ...metadata_google.cloud.workstations.v1.json | 2 +- ...data_google.cloud.workstations.v1beta.json | 2 +- .../workstations_v1/test_workstations.py | 290 +++-- .../workstations_v1beta/test_workstations.py | 290 +++-- .../google/geo/type/gapic_version.py | 2 +- .../maps/addressvalidation/gapic_version.py | 2 +- .../addressvalidation_v1/gapic_version.py | 2 +- .../address_validation/async_client.py | 5 +- .../services/address_validation/client.py | 2 +- ...data_google.maps.addressvalidation.v1.json | 2 +- .../test_address_validation.py | 18 +- .../fleetengine_delivery/gapic_version.py | 2 +- .../fleetengine_delivery_v1/gapic_version.py | 2 +- .../services/delivery_service/async_client.py | 5 +- .../services/delivery_service/client.py | 2 +- ...metadata_maps.fleetengine.delivery.v1.json | 2 +- 
.../test_delivery_service.py | 90 +- .../google/maps/fleetengine/gapic_version.py | 2 +- .../maps/fleetengine_v1/gapic_version.py | 2 +- .../services/trip_service/async_client.py | 5 +- .../services/trip_service/client.py | 2 +- .../services/vehicle_service/async_client.py | 5 +- .../services/vehicle_service/client.py | 2 +- .../snippet_metadata_maps.fleetengine.v1.json | 2 +- .../gapic/fleetengine_v1/test_trip_service.py | 45 +- .../fleetengine_v1/test_vehicle_service.py | 54 +- .../mapsplatformdatasets/gapic_version.py | 2 +- .../mapsplatformdatasets_v1/gapic_version.py | 2 +- .../maps_platform_datasets/async_client.py | 6 +- .../services/maps_platform_datasets/client.py | 2 +- ...a_google.maps.mapsplatformdatasets.v1.json | 2 +- .../test_maps_platform_datasets.py | 54 +- .../doc-formatting.yaml | 2 +- 241 files changed, 4723 insertions(+), 3816 deletions(-) diff --git a/packages/google-cloud-trace/google/cloud/trace/gapic_version.py b/packages/google-cloud-trace/google/cloud/trace/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-trace/google/cloud/trace/gapic_version.py +++ b/packages/google-cloud-trace/google/cloud/trace/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-trace/google/cloud/trace_v1/gapic_version.py b/packages/google-cloud-trace/google/cloud/trace_v1/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-trace/google/cloud/trace_v1/gapic_version.py +++ b/packages/google-cloud-trace/google/cloud/trace_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/async_client.py b/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/async_client.py index 3960f06f74f9..6ed28847cbe8 100644 --- a/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/async_client.py +++ b/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TraceServiceClient).get_transport_class, type(TraceServiceClient) - ) + get_transport_class = TraceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/client.py b/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/client.py index 0949cf6aea95..d23d13065e9e 100644 --- a/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/client.py +++ b/packages/google-cloud-trace/google/cloud/trace_v1/services/trace_service/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[TraceServiceTransport], Callable[..., TraceServiceTransport] ] = ( - type(self).get_transport_class(transport) + TraceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TraceServiceTransport], transport) ) diff --git a/packages/google-cloud-trace/google/cloud/trace_v2/gapic_version.py b/packages/google-cloud-trace/google/cloud/trace_v2/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-trace/google/cloud/trace_v2/gapic_version.py +++ b/packages/google-cloud-trace/google/cloud/trace_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/async_client.py b/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/async_client.py index 03858e00cf92..2eb82e7b7427 100644 --- a/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/async_client.py +++ b/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TraceServiceClient).get_transport_class, type(TraceServiceClient) - ) + get_transport_class = TraceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/client.py b/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/client.py index 7fd4ebc57135..2ce519e1fc61 100644 --- a/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/client.py +++ b/packages/google-cloud-trace/google/cloud/trace_v2/services/trace_service/client.py @@ -666,7 +666,7 @@ def __init__( transport_init: Union[ Type[TraceServiceTransport], Callable[..., TraceServiceTransport] ] = ( - type(self).get_transport_class(transport) + TraceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TraceServiceTransport], transport) ) diff --git a/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v1.json b/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v1.json index ae7f573aee0c..d3b883caf123 100644 --- a/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v1.json +++ b/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-trace", - "version": "1.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v2.json b/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v2.json index c6fc81e71bd9..3f65264fc1b5 100644 --- a/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v2.json +++ b/packages/google-cloud-trace/samples/generated_samples/snippet_metadata_google.devtools.cloudtrace.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-trace", - "version": "1.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py b/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py index 30ef9ec548a3..2caf189188cd 100644 --- a/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py +++ b/packages/google-cloud-trace/tests/unit/gapic/trace_v1/test_trace_service.py @@ -1269,22 +1269,23 @@ async def test_list_traces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_traces - ] = mock_object + ] = mock_rpc request = {} await client.list_traces(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_traces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1830,22 +1831,23 @@ async def test_get_trace_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_trace - ] = mock_object + ] = mock_rpc request = {} await client.get_trace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_trace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2197,22 +2199,23 @@ async def test_patch_traces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.patch_traces - ] = mock_object + ] = mock_rpc request = {} await client.patch_traces(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.patch_traces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-trace/tests/unit/gapic/trace_v2/test_trace_service.py b/packages/google-cloud-trace/tests/unit/gapic/trace_v2/test_trace_service.py index d3491782b994..cc9ee077efb6 100644 --- a/packages/google-cloud-trace/tests/unit/gapic/trace_v2/test_trace_service.py +++ b/packages/google-cloud-trace/tests/unit/gapic/trace_v2/test_trace_service.py @@ -1267,22 +1267,23 @@ async def test_batch_write_spans_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_write_spans - ] = mock_object + ] = mock_rpc request = {} await client.batch_write_spans(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_write_spans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1655,22 +1656,23 @@ async def test_create_span_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_span - ] = mock_object + ] = mock_rpc request = {} await client.create_span(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_span(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream/gapic_version.py b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream/gapic_version.py index 02874f69f4e5..558c8aab67c5 100644 --- a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream/gapic_version.py +++ b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/gapic_version.py b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/gapic_version.py index 02874f69f4e5..558c8aab67c5 100644 --- a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/gapic_version.py +++ b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/async_client.py b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/async_client.py index c6143a6680d8..9b57c0680d49 100644 --- a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/async_client.py +++ b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,9 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LivestreamServiceClient).get_transport_class, type(LivestreamServiceClient) - ) + get_transport_class = LivestreamServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/client.py b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/client.py index 297ec14041f8..54120fbe6491 100644 --- a/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/client.py +++ b/packages/google-cloud-video-live-stream/google/cloud/video/live_stream_v1/services/livestream_service/client.py @@ -835,7 +835,7 @@ def __init__( Type[LivestreamServiceTransport], Callable[..., LivestreamServiceTransport], ] = ( - type(self).get_transport_class(transport) + LivestreamServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LivestreamServiceTransport], transport) ) diff --git a/packages/google-cloud-video-live-stream/samples/generated_samples/snippet_metadata_google.cloud.video.livestream.v1.json b/packages/google-cloud-video-live-stream/samples/generated_samples/snippet_metadata_google.cloud.video.livestream.v1.json index 01402aee6a33..b33439ee1890 100644 --- a/packages/google-cloud-video-live-stream/samples/generated_samples/snippet_metadata_google.cloud.video.livestream.v1.json +++ b/packages/google-cloud-video-live-stream/samples/generated_samples/snippet_metadata_google.cloud.video.livestream.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-video-live-stream", - "version": "1.8.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py b/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py index 808481bbbe35..9d99b6629b2d 100644 --- a/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py +++ b/packages/google-cloud-video-live-stream/tests/unit/gapic/live_stream_v1/test_livestream_service.py @@ -1285,8 +1285,9 @@ def test_create_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_channel(request) @@ -1340,26 +1341,28 @@ async def test_create_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_ - ] = mock_object + ] = mock_rpc request = {} await client.create_channel(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1734,22 +1737,23 @@ async def test_list_channels_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channels - ] = mock_object + ] = mock_rpc request = {} await client.list_channels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2300,22 +2304,23 @@ async def test_get_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel - ] = mock_object + ] = mock_rpc request = {} await client.get_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2610,8 +2615,9 @@ def test_delete_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_channel(request) @@ -2665,26 +2671,28 @@ async def test_delete_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_channel - ] = mock_object + ] = mock_rpc request = {} await client.delete_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2974,8 +2982,9 @@ def test_update_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_channel(request) @@ -3029,26 +3038,28 @@ async def test_update_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_channel - ] = mock_object + ] = mock_rpc request = {} await client.update_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3350,8 +3361,9 @@ def test_start_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_channel(request) @@ -3405,26 +3417,28 @@ async def test_start_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_channel - ] = mock_object + ] = mock_rpc request = {} await client.start_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3716,8 +3730,9 @@ def test_stop_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_channel(request) @@ -3771,26 +3786,28 @@ async def test_stop_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_channel - ] = mock_object + ] = mock_rpc request = {} await client.stop_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4084,8 +4101,9 @@ def test_create_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_input(request) @@ -4139,26 +4157,28 @@ async def test_create_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_input - ] = mock_object + ] = mock_rpc request = {} await client.create_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4533,22 +4553,23 @@ async def test_list_inputs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_inputs - ] = mock_object + ] = mock_rpc request = {} await client.list_inputs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_inputs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5100,22 +5121,23 @@ async def test_get_input_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_input - ] = mock_object + ] = mock_rpc request = {} await client.get_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5412,8 +5434,9 @@ def test_delete_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_input(request) @@ -5467,26 +5490,28 @@ async def test_delete_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_input - ] = mock_object + ] = mock_rpc request = {} await client.delete_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5776,8 +5801,9 @@ def test_update_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
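The reworded comments describe a transport-level cache: the first long-running-operation call builds an operations client once, and later calls reuse it. A hedged sketch of that lazy-caching shape, with illustrative names only, not the generated transport API:

class FakeOperationsTransport:
    def __init__(self, build_operations_client):
        self._build = build_operations_client
        self._cached_operations_client = None

    @property
    def operations_client(self):
        # Built on first access only; later accesses return the cached instance.
        if self._cached_operations_client is None:
            self._cached_operations_client = self._build()
        return self._cached_operations_client


builds = []
transport = FakeOperationsTransport(lambda: builds.append("built") or object())
first = transport.operations_client
second = transport.operations_client
assert first is second
assert builds == ["built"]  # the factory ran exactly once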
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_input(request) @@ -5831,26 +5857,28 @@ async def test_update_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_input - ] = mock_object + ] = mock_rpc request = {} await client.update_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6216,22 +6244,23 @@ async def test_create_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_event - ] = mock_object + ] = mock_rpc request = {} await client.create_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6609,22 +6638,23 @@ async def test_list_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_events - ] = mock_object + ] = mock_rpc request = {} await client.list_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7173,22 +7203,23 @@ async def test_get_event_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event - ] = mock_object + ] = mock_rpc request = {} await client.get_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7532,22 +7563,23 @@ async def test_delete_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7894,22 +7926,23 @@ async def test_list_clips_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clips - ] = mock_object + ] = mock_rpc request = {} await client.list_clips(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8458,22 +8491,23 @@ async def test_get_clip_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_clip - ] = mock_object + ] = mock_rpc request = {} await client.get_clip(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_clip(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8770,8 +8804,9 @@ def test_create_clip_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_clip(request) @@ -8825,26 +8860,28 @@ async def test_create_clip_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_clip - ] = mock_object + ] = mock_rpc request = {} await client.create_clip(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_clip(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9154,8 +9191,9 @@ def test_delete_clip_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_clip(request) @@ -9209,26 +9247,28 @@ async def test_delete_clip_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_clip - ] = mock_object + ] = mock_rpc request = {} await client.delete_clip(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_clip(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9522,8 +9562,9 @@ def test_create_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_asset(request) @@ -9577,26 +9618,28 @@ async def test_create_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_asset - ] = mock_object + ] = mock_rpc request = {} await client.create_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9908,8 +9951,9 @@ def test_delete_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_asset(request) @@ -9963,26 +10007,28 @@ async def test_delete_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_asset - ] = mock_object + ] = mock_rpc request = {} await client.delete_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10332,22 +10378,23 @@ async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_asset - ] = mock_object + ] = mock_rpc request = {} await client.get_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10705,22 +10752,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11263,22 +11311,23 @@ async def test_get_pool_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11567,8 +11616,9 @@ def test_update_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_pool(request) @@ -11622,26 +11672,28 @@ async def test_update_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/gapic_version.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/gapic_version.py index 43f4d6aa457c..558c8aab67c5 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/gapic_version.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.7.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_version.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_version.py index 43f4d6aa457c..558c8aab67c5 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_version.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py index 15e9b1c18f58..2ccc6e232391 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -244,10 +243,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VideoStitcherServiceClient).get_transport_class, - type(VideoStitcherServiceClient), - ) + get_transport_class = VideoStitcherServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py index 68fa4ec95d43..74ebb99eb5a2 100644 --- a/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py +++ b/packages/google-cloud-video-stitcher/google/cloud/video/stitcher_v1/services/video_stitcher_service/client.py @@ -870,7 +870,7 @@ def __init__( Type[VideoStitcherServiceTransport], Callable[..., VideoStitcherServiceTransport], ] = ( - type(self).get_transport_class(transport) + VideoStitcherServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VideoStitcherServiceTransport], transport) ) diff --git a/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json b/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json index b7205ebe1ab9..e2f9394ca881 100644 --- a/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json +++ b/packages/google-cloud-video-stitcher/samples/generated_samples/snippet_metadata_google.cloud.video.stitcher.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-video-stitcher", - "version": "0.7.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py b/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py index 3b99e02c3c09..8a437797aa09 100644 --- 
a/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py +++ b/packages/google-cloud-video-stitcher/tests/unit/gapic/stitcher_v1/test_video_stitcher_service.py @@ -1270,8 +1270,9 @@ def test_create_cdn_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cdn_key(request) @@ -1325,26 +1326,28 @@ async def test_create_cdn_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cdn_key - ] = mock_object + ] = mock_rpc request = {} await client.create_cdn_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cdn_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1732,22 +1735,23 @@ async def test_list_cdn_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_cdn_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_cdn_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_cdn_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2296,22 +2300,23 @@ async def test_get_cdn_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cdn_key - ] = mock_object + ] = mock_rpc request = {} await client.get_cdn_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cdn_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2603,8 +2608,9 @@ def test_delete_cdn_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cdn_key(request) @@ -2658,26 +2664,28 @@ async def test_delete_cdn_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cdn_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_cdn_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cdn_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2964,8 +2972,9 @@ def test_update_cdn_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cdn_key(request) @@ -3019,26 +3028,28 @@ async def test_update_cdn_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cdn_key - ] = mock_object + ] = mock_rpc request = {} await client.update_cdn_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cdn_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3437,22 +3448,23 @@ async def test_create_vod_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_vod_session - ] = mock_object + ] = mock_rpc request = {} await client.create_vod_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_vod_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3848,22 +3860,23 @@ async def test_get_vod_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vod_session - ] = mock_object + ] = mock_rpc request = {} await client.get_vod_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vod_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4236,22 +4249,23 @@ async def test_list_vod_stitch_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vod_stitch_details - ] = mock_object + ] = mock_rpc request = {} await client.list_vod_stitch_details(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vod_stitch_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4826,22 +4840,23 @@ async def test_get_vod_stitch_detail_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vod_stitch_detail - ] = mock_object + ] = mock_rpc request = {} await client.get_vod_stitch_detail(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vod_stitch_detail(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5216,22 +5231,23 @@ async def test_list_vod_ad_tag_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vod_ad_tag_details - ] = mock_object + ] = mock_rpc request = {} await client.list_vod_ad_tag_details(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vod_ad_tag_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5806,22 +5822,23 @@ async def test_get_vod_ad_tag_detail_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vod_ad_tag_detail - ] = mock_object + ] = mock_rpc request = {} await client.get_vod_ad_tag_detail(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vod_ad_tag_detail(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6196,22 +6213,23 @@ async def test_list_live_ad_tag_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_live_ad_tag_details - ] = mock_object + ] = mock_rpc request = {} await client.list_live_ad_tag_details(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_live_ad_tag_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6788,22 +6806,23 @@ async def test_get_live_ad_tag_detail_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_live_ad_tag_detail - ] = mock_object + ] = mock_rpc request = {} await client.get_live_ad_tag_detail(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_live_ad_tag_detail(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7111,8 +7130,9 @@ def test_create_slate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_slate(request) @@ -7166,26 +7186,28 @@ async def test_create_slate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_slate - ] = mock_object + ] = mock_rpc request = {} await client.create_slate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_slate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7561,22 +7583,23 @@ async def test_list_slates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_slates - ] = mock_object + ] = mock_rpc request = {} await client.list_slates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_slates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8123,22 +8146,23 @@ async def test_get_slate_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_slate - ] = mock_object + ] = mock_rpc request = {} await client.get_slate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_slate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8425,8 +8449,9 @@ def test_update_slate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_slate(request) @@ -8480,26 +8505,28 @@ async def test_update_slate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_slate - ] = mock_object + ] = mock_rpc request = {} await client.update_slate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_slate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8800,8 +8827,9 @@ def test_delete_slate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_slate(request) @@ -8855,26 +8883,28 @@ async def test_delete_slate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_slate - ] = mock_object + ] = mock_rpc request = {} await client.delete_slate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_slate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9242,22 +9272,23 @@ async def test_create_live_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_live_session - ] = mock_object + ] = mock_rpc request = {} await client.create_live_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_live_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9644,22 +9675,23 @@ async def test_get_live_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_live_session - ] = mock_object + ] = mock_rpc request = {} await client.get_live_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_live_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9973,8 +10005,9 @@ def test_create_live_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_live_config(request) @@ -10030,26 +10063,28 @@ async def test_create_live_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_live_config - ] = mock_object + ] = mock_rpc request = {} await client.create_live_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_live_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10445,22 +10480,23 @@ async def test_list_live_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_live_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_live_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_live_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11044,22 +11080,23 @@ async def test_get_live_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_live_config - ] = mock_object + ] = mock_rpc request = {} await client.get_live_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_live_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11377,8 +11414,9 @@ def test_delete_live_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_live_config(request) @@ -11434,26 +11472,28 @@ async def test_delete_live_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_live_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_live_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_live_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11760,8 +11800,9 @@ def test_update_live_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_live_config(request) @@ -11817,26 +11858,28 @@ async def test_update_live_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_live_config - ] = mock_object + ] = mock_rpc request = {} await client.update_live_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_live_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12159,8 +12202,9 @@ def test_create_vod_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_vod_config(request) @@ -12216,26 +12260,28 @@ async def test_create_vod_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_vod_config - ] = mock_object + ] = mock_rpc request = {} await client.create_vod_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_vod_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12623,22 +12669,23 @@ async def test_list_vod_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vod_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_vod_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vod_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13193,22 +13240,23 @@ async def test_get_vod_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vod_config - ] = mock_object + ] = mock_rpc request = {} await client.get_vod_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vod_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13516,8 +13564,9 @@ def test_delete_vod_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_vod_config(request) @@ -13573,26 +13622,28 @@ async def test_delete_vod_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_vod_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_vod_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_vod_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13897,8 +13948,9 @@ def test_update_vod_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_vod_config(request) @@ -13954,26 +14006,28 @@ async def test_update_vod_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_vod_config - ] = mock_object + ] = mock_rpc request = {} await client.update_vod_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_vod_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder/gapic_version.py b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder/gapic_version.py +++ b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/gapic_version.py b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/gapic_version.py +++ b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py index 0eecda05be1d..25080697d07f 100644 --- a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient) - ) + get_transport_class = TranscoderServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/client.py index a42d29b1e095..efdd02801b23 100644 --- a/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ b/packages/google-cloud-video-transcoder/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -697,7 +697,7 @@ def __init__( Type[TranscoderServiceTransport], Callable[..., TranscoderServiceTransport], ] = ( - type(self).get_transport_class(transport) + TranscoderServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TranscoderServiceTransport], transport) ) diff --git a/packages/google-cloud-video-transcoder/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json b/packages/google-cloud-video-transcoder/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json index 198fb41fb0b9..2c7572e16c29 100644 --- a/packages/google-cloud-video-transcoder/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json +++ b/packages/google-cloud-video-transcoder/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-video-transcoder", - "version": "1.12.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py index 4265db7db50b..a87e1b63a8df 100644 --- a/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ b/packages/google-cloud-video-transcoder/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -1343,22 +1343,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1734,22 +1735,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2314,22 +2316,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,22 +2682,23 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3048,22 +3052,23 @@ async def test_create_job_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job_template - ] = mock_object + ] = mock_rpc request = {} await client.create_job_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3463,22 +3468,23 @@ async def test_list_job_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_job_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_job_templates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_job_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4043,22 +4049,23 @@ async def test_get_job_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job_template - ] = mock_object + ] = mock_rpc request = {} await client.get_job_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4412,22 +4419,23 @@ async def test_delete_job_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_job_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence/gapic_version.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_version.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/async_client.py index 453ffbdfdfe5..7c2aea04f1cb 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/async_client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VideoIntelligenceServiceClient).get_transport_class, - type(VideoIntelligenceServiceClient), - ) + get_transport_class = VideoIntelligenceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/client.py index 971a4f424d62..e237840afa68 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/client.py @@ -649,7 +649,7 @@ def __init__( Type[VideoIntelligenceServiceTransport], Callable[..., VideoIntelligenceServiceTransport], ] = ( - type(self).get_transport_class(transport) + VideoIntelligenceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VideoIntelligenceServiceTransport], transport) ) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_version.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/async_client.py index f502c638b92e..a34e91042374 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/async_client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VideoIntelligenceServiceClient).get_transport_class, - type(VideoIntelligenceServiceClient), - ) + get_transport_class = VideoIntelligenceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/client.py index 4b71b3e0b210..d3475d758528 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/client.py @@ -649,7 +649,7 @@ def __init__( Type[VideoIntelligenceServiceTransport], Callable[..., VideoIntelligenceServiceTransport], ] = ( - type(self).get_transport_class(transport) + VideoIntelligenceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VideoIntelligenceServiceTransport], transport) ) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/async_client.py index 6ba75ec51769..b5a88c366ffd 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/async_client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VideoIntelligenceServiceClient).get_transport_class, - type(VideoIntelligenceServiceClient), - ) + get_transport_class = VideoIntelligenceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/client.py index 4cfbf7574782..f39b10c547b2 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/client.py @@ -649,7 +649,7 @@ def __init__( Type[VideoIntelligenceServiceTransport], Callable[..., VideoIntelligenceServiceTransport], ] = ( - type(self).get_transport_class(transport) + VideoIntelligenceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VideoIntelligenceServiceTransport], transport) ) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/async_client.py index de5047388b14..d3bf9c203da8 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/async_client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VideoIntelligenceServiceClient).get_transport_class, - type(VideoIntelligenceServiceClient), - ) + get_transport_class = VideoIntelligenceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/client.py index 86ac4a4d4747..b91e930e3ea7 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/client.py @@ -649,7 +649,7 @@ def __init__( Type[VideoIntelligenceServiceTransport], Callable[..., VideoIntelligenceServiceTransport], ] = ( - type(self).get_transport_class(transport) + VideoIntelligenceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VideoIntelligenceServiceTransport], transport) ) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_version.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/async_client.py index e28d1fcf3f2b..954fef29f5d3 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/async_client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StreamingVideoIntelligenceServiceClient).get_transport_class, - type(StreamingVideoIntelligenceServiceClient), - ) + get_transport_class = StreamingVideoIntelligenceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/client.py index b4d90ee4ff9e..cca188d4672d 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/client.py @@ -663,7 +663,7 @@ def __init__( Type[StreamingVideoIntelligenceServiceTransport], Callable[..., StreamingVideoIntelligenceServiceTransport], ] = ( - type(self).get_transport_class(transport) + StreamingVideoIntelligenceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., StreamingVideoIntelligenceServiceTransport], transport diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/async_client.py index c73fdf885d9d..002f73484ccb 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/async_client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VideoIntelligenceServiceClient).get_transport_class, - type(VideoIntelligenceServiceClient), - ) + get_transport_class = VideoIntelligenceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/client.py index 0d6a59b55c8e..4d77ff8df9d7 100644 --- a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/client.py +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/client.py @@ -647,7 +647,7 @@ def __init__( Type[VideoIntelligenceServiceTransport], Callable[..., VideoIntelligenceServiceTransport], ] = ( - type(self).get_transport_class(transport) + VideoIntelligenceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VideoIntelligenceServiceTransport], transport) ) diff --git a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1.json b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1.json index 5c9c86346133..1f03d3b6eb98 100644 --- a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1.json +++ b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-videointelligence", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1beta2.json b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1beta2.json index bb7f05dd0fbe..27c0821c60d6 100644 --- a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1beta2.json +++ b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-videointelligence", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p1beta1.json b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p1beta1.json index 8b52ed8458aa..91bdfe52093c 100644 --- a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p1beta1.json +++ b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-videointelligence", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p2beta1.json b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p2beta1.json index eb884565c8ee..ac1ac90e4237 100644 --- a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p2beta1.json +++ b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-videointelligence", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p3beta1.json b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p3beta1.json index 52f5f09574e1..91041a3989af 100644 --- a/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p3beta1.json +++ b/packages/google-cloud-videointelligence/samples/generated_samples/snippet_metadata_google.cloud.videointelligence.v1p3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-videointelligence", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/test_video_intelligence_service.py index 66d969dd20d2..3bb4eeec4459 100644 --- a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/test_video_intelligence_service.py +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/test_video_intelligence_service.py @@ -1317,8 +1317,9 @@ def test_annotate_video_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.annotate_video(request) @@ -1372,26 +1373,28 @@ async def test_annotate_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_video - ] = mock_object + ] = mock_rpc request = {} await client.annotate_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.annotate_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/test_video_intelligence_service.py index d48b951f0f31..35865aefa3fc 100644 --- a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/test_video_intelligence_service.py +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/test_video_intelligence_service.py @@ -1317,8 +1317,9 @@ def test_annotate_video_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.annotate_video(request) @@ -1372,26 +1373,28 @@ async def test_annotate_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_video - ] = mock_object + ] = mock_rpc request = {} await client.annotate_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.annotate_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/test_video_intelligence_service.py index 63aae4ece0cf..8174bc446d11 100644 --- a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/test_video_intelligence_service.py +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/test_video_intelligence_service.py @@ -1317,8 +1317,9 @@ def test_annotate_video_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.annotate_video(request) @@ -1372,26 +1373,28 @@ async def test_annotate_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_video - ] = mock_object + ] = mock_rpc request = {} await client.annotate_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.annotate_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py index f28b066e4187..eb77f7fc640e 100644 --- a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py @@ -1317,8 +1317,9 @@ def test_annotate_video_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.annotate_video(request) @@ -1372,26 +1373,28 @@ async def test_annotate_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_video - ] = mock_object + ] = mock_rpc request = {} await client.annotate_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.annotate_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_streaming_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_streaming_video_intelligence_service.py index 3dc0d0b3cd01..de3b5c94c357 100644 --- a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_streaming_video_intelligence_service.py +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_streaming_video_intelligence_service.py @@ -1272,22 +1272,23 @@ async def test_streaming_annotate_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_annotate_video - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_annotate_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_annotate_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_video_intelligence_service.py index 495ecf5d55d3..f4587ded15d2 100644 --- a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_video_intelligence_service.py +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_video_intelligence_service.py @@ -1267,8 +1267,9 @@ def test_annotate_video_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.annotate_video(request) @@ -1322,26 +1323,28 @@ async def test_annotate_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.annotate_video - ] = mock_object + ] = mock_rpc request = {} await client.annotate_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.annotate_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/google/cloud/vision/gapic_version.py b/packages/google-cloud-vision/google/cloud/vision/gapic_version.py index e5ad31ad9978..558c8aab67c5 100644 --- a/packages/google-cloud-vision/google/cloud/vision/gapic_version.py +++ b/packages/google-cloud-vision/google/cloud/vision/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.7.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vision/google/cloud/vision_v1/gapic_version.py b/packages/google-cloud-vision/google/cloud/vision_v1/gapic_version.py index e5ad31ad9978..558c8aab67c5 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1/gapic_version.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.7.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/async_client.py index 7713c2a35d6c..60941a348fe9 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ImageAnnotatorClient).get_transport_class, type(ImageAnnotatorClient) - ) + get_transport_class = ImageAnnotatorClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/client.py b/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/client.py index e7215d296e5a..e4e38b807fcc 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1/services/image_annotator/client.py @@ -691,7 +691,7 @@ def __init__( transport_init: Union[ Type[ImageAnnotatorTransport], Callable[..., ImageAnnotatorTransport] ] = ( - type(self).get_transport_class(transport) + ImageAnnotatorClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageAnnotatorTransport], transport) ) diff --git a/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/async_client.py index 709417f04c7d..6cab5c209229 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductSearchClient).get_transport_class, type(ProductSearchClient) - ) + get_transport_class = ProductSearchClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/client.py b/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/client.py index d0ed15b83b16..f6ba348f0e1a 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1/services/product_search/client.py @@ -732,7 +732,7 @@ def __init__( transport_init: Union[ Type[ProductSearchTransport], Callable[..., ProductSearchTransport] ] = ( - type(self).get_transport_class(transport) + ProductSearchClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductSearchTransport], transport) ) diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/gapic_version.py b/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/gapic_version.py index e5ad31ad9978..558c8aab67c5 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
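
# --- Illustrative aside (sketch, not generated diff content) ------------------
# The client diffs above replace functools.partial(type(Client).get_transport_class,
# type(Client)) with plain Client.get_transport_class. A hedged sketch of why the
# two spellings resolve the same transport when, as the generated clients appear
# to do, get_transport_class lives on the client's metaclass (ClientMeta and
# ExampleClient are illustrative names):
import functools
from collections import OrderedDict

class ClientMeta(type):
    # The registry lives on the metaclass, so it is reachable both through the
    # metaclass itself and through any class built from it.
    _transport_registry = OrderedDict(grpc="GrpcTransport", rest="RestTransport")

    def get_transport_class(cls, label=None):
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))

class ExampleClient(metaclass=ClientMeta):
    pass

old_style = functools.partial(
    type(ExampleClient).get_transport_class, type(ExampleClient)
)
new_style = ExampleClient.get_transport_class  # already bound via the metaclass

assert old_style("grpc") == new_style("grpc") == "GrpcTransport"
assert old_style() == new_style() == "GrpcTransport"
# -------------------------------------------------------------------------------
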
# -__version__ = "3.7.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/async_client.py index 7bd038590859..42b13364b0f1 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ImageAnnotatorClient).get_transport_class, type(ImageAnnotatorClient) - ) + get_transport_class = ImageAnnotatorClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/client.py b/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/client.py index 7f8bddfcd239..ad180a9fded6 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p1beta1/services/image_annotator/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[ImageAnnotatorTransport], Callable[..., ImageAnnotatorTransport] ] = ( - type(self).get_transport_class(transport) + ImageAnnotatorClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageAnnotatorTransport], transport) ) diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/gapic_version.py b/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/gapic_version.py index e5ad31ad9978..558c8aab67c5 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.7.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/async_client.py index 7c9f4812d0a0..e04a295b0b33 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
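
# --- Illustrative aside (sketch, not generated diff content) ------------------
# The gapic_version.py and snippet-metadata hunks in this series reset pinned
# versions to placeholders ("0.0.0" and "0.1.0"); the trailing
# {x-release-please-version} marker is what the release tooling appears to key
# on when it stamps the real version back in. At runtime the value is just an
# ordinary module attribute, e.g.:
from google.cloud.vision import gapic_version

print(gapic_version.__version__)  # "0.0.0" at this repo state; a released number on PyPI
# -------------------------------------------------------------------------------
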
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ImageAnnotatorClient).get_transport_class, type(ImageAnnotatorClient) - ) + get_transport_class = ImageAnnotatorClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/client.py b/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/client.py index c68a368ffdfe..dc892756fab0 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p2beta1/services/image_annotator/client.py @@ -644,7 +644,7 @@ def __init__( transport_init: Union[ Type[ImageAnnotatorTransport], Callable[..., ImageAnnotatorTransport] ] = ( - type(self).get_transport_class(transport) + ImageAnnotatorClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageAnnotatorTransport], transport) ) diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/gapic_version.py b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/gapic_version.py index e5ad31ad9978..558c8aab67c5 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/gapic_version.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.7.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/async_client.py index e1f804094923..a3f1d2d20e4d 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ImageAnnotatorClient).get_transport_class, type(ImageAnnotatorClient) - ) + get_transport_class = ImageAnnotatorClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/client.py b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/client.py index fbf3c9963523..83838b88e40e 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/image_annotator/client.py @@ -690,7 +690,7 @@ def __init__( transport_init: Union[ Type[ImageAnnotatorTransport], Callable[..., ImageAnnotatorTransport] ] = ( - type(self).get_transport_class(transport) + ImageAnnotatorClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageAnnotatorTransport], transport) ) diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/async_client.py index fc5b1570aece..c89f89176a85 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductSearchClient).get_transport_class, type(ProductSearchClient) - ) + get_transport_class = ProductSearchClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/client.py b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/client.py index 8c320ab2cff1..ade3c2dd3dc3 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p3beta1/services/product_search/client.py @@ -732,7 +732,7 @@ def __init__( transport_init: Union[ Type[ProductSearchTransport], Callable[..., ProductSearchTransport] ] = ( - type(self).get_transport_class(transport) + ProductSearchClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductSearchTransport], transport) ) diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/gapic_version.py b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/gapic_version.py index e5ad31ad9978..558c8aab67c5 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.7.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/async_client.py index 1c34d1fce109..72df8a7df27f 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ImageAnnotatorClient).get_transport_class, type(ImageAnnotatorClient) - ) + get_transport_class = ImageAnnotatorClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/client.py b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/client.py index b1deb81d81b5..a49ce03778d9 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/image_annotator/client.py @@ -690,7 +690,7 @@ def __init__( transport_init: Union[ Type[ImageAnnotatorTransport], Callable[..., ImageAnnotatorTransport] ] = ( - type(self).get_transport_class(transport) + ImageAnnotatorClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageAnnotatorTransport], transport) ) diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/async_client.py b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/async_client.py index ac6005888acf..94d731dff25b 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/async_client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -220,9 +219,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductSearchClient).get_transport_class, type(ProductSearchClient) - ) + get_transport_class = ProductSearchClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/client.py b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/client.py index 7372093e26c5..949b80361bde 100644 --- a/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/client.py +++ b/packages/google-cloud-vision/google/cloud/vision_v1p4beta1/services/product_search/client.py @@ -733,7 +733,7 @@ def __init__( transport_init: Union[ Type[ProductSearchTransport], Callable[..., ProductSearchTransport] ] = ( - type(self).get_transport_class(transport) + ProductSearchClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductSearchTransport], transport) ) diff --git a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1.json b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1.json index 3cdc73ba3ae2..e29e0ec20f4f 100644 --- a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1.json +++ b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vision", - "version": "3.7.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p1beta1.json b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p1beta1.json index 95b2470eab48..70f20c5e5334 100644 --- a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p1beta1.json +++ b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vision", - "version": "3.7.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p2beta1.json b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p2beta1.json index 4fa4c916a850..52e22f36f702 100644 --- a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p2beta1.json +++ b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vision", - "version": "3.7.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p3beta1.json b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p3beta1.json index 18589edf1866..2dae915e840f 100644 --- a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p3beta1.json +++ b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p3beta1.json @@ -8,7 +8,7 @@ ], 
"language": "PYTHON", "name": "google-cloud-vision", - "version": "3.7.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p4beta1.json b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p4beta1.json index a3fc4917d491..a1201ce42817 100644 --- a/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p4beta1.json +++ b/packages/google-cloud-vision/samples/generated_samples/snippet_metadata_google.cloud.vision.v1p4beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vision", - "version": "3.7.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_image_annotator.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_image_annotator.py index 102b4a3daa30..29115320a9eb 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_image_annotator.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_image_annotator.py @@ -1302,22 +1302,23 @@ async def test_batch_annotate_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_annotate_images - ] = mock_object + ] = mock_rpc request = {} await client.batch_annotate_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_annotate_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1640,22 +1641,23 @@ async def test_batch_annotate_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_annotate_files - ] = mock_object + ] = mock_rpc request = {} await client.batch_annotate_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_annotate_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1938,8 +1940,9 @@ def test_async_batch_annotate_images_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.async_batch_annotate_images(request) @@ -1995,26 +1998,28 @@ async def test_async_batch_annotate_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.async_batch_annotate_images - ] = mock_object + ] = mock_rpc request = {} await client.async_batch_annotate_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.async_batch_annotate_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2307,8 +2312,9 @@ def test_async_batch_annotate_files_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.async_batch_annotate_files(request) @@ -2364,26 +2370,28 @@ async def test_async_batch_annotate_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.async_batch_annotate_files - ] = mock_object + ] = mock_rpc request = {} await client.async_batch_annotate_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.async_batch_annotate_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py index 796b4a41faa6..abac6a85612b 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1/test_product_search.py @@ -1312,22 +1312,23 @@ async def test_create_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product_set - ] = mock_object + ] = mock_rpc request = {} await client.create_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1721,22 +1722,23 @@ async def test_list_product_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_product_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_product_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_product_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2301,22 +2303,23 @@ async def test_get_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product_set - ] = mock_object + ] = mock_rpc request = {} await client.get_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,22 +2682,23 @@ async def test_update_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product_set - ] = mock_object + ] = mock_rpc request = {} await client.update_product_set(request) # Establish that the underlying gRPC stub method was called. 
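
# --- Illustrative aside (sketch, not generated diff content) ------------------
# A self-contained distillation of what the *_use_cached_wrapped_rpc tests in
# these files assert: a wrapped method is built once, cached on the transport,
# and reused on later calls. FakeTransport and the lambda target are
# illustrative, not the generated transport; wrapper_fn stands in for the
# wrapper factory the tests patch under that name.
from unittest import mock

class FakeTransport:
    def __init__(self, wrap_method):
        self._wrap_method = wrap_method
        self._wrapped_methods = {}

    def call(self, name, request):
        if name not in self._wrapped_methods:  # first call wraps and caches
            self._wrapped_methods[name] = self._wrap_method(lambda req: ("ok", req))
        return self._wrapped_methods[name](request)

wrapper_fn = mock.Mock(side_effect=lambda fn: fn)
transport = FakeTransport(wrapper_fn)

transport.call("get_product_set", {})
assert wrapper_fn.call_count == 1  # wrapper built on the first call

mock_rpc = mock.Mock(return_value="response")
transport._wrapped_methods["get_product_set"] = mock_rpc  # replace cached wrapper

wrapper_fn.reset_mock()
transport.call("get_product_set", {})
transport.call("get_product_set", {})
assert wrapper_fn.call_count == 0  # no new wrapper was created
assert mock_rpc.call_count == 2    # the cached (mocked) wrapper handled both calls
# -------------------------------------------------------------------------------
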
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3071,22 +3075,23 @@ async def test_delete_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3448,22 +3453,23 @@ async def test_create_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product - ] = mock_object + ] = mock_rpc request = {} await client.create_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3841,22 +3847,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4409,22 +4416,23 @@ async def test_get_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product - ] = mock_object + ] = mock_rpc request = {} await client.get_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4785,22 +4793,23 @@ async def test_update_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product - ] = mock_object + ] = mock_rpc request = {} await client.update_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5159,22 +5168,23 @@ async def test_delete_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product - ] = mock_object + ] = mock_rpc request = {} await client.delete_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5533,22 +5543,23 @@ async def test_create_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.create_reference_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5936,22 +5947,23 @@ async def test_delete_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.delete_reference_image(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6320,22 +6332,23 @@ async def test_list_reference_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reference_images - ] = mock_object + ] = mock_rpc request = {} await client.list_reference_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reference_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6918,22 +6931,23 @@ async def test_get_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.get_reference_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7303,22 +7317,23 @@ async def test_add_product_to_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_product_to_product_set - ] = mock_object + ] = mock_rpc request = {} await client.add_product_to_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_product_to_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7687,22 +7702,23 @@ async def test_remove_product_from_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_product_from_product_set - ] = mock_object + ] = mock_rpc request = {} await client.remove_product_from_product_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_product_from_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8078,22 +8094,23 @@ async def test_list_products_in_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products_in_product_set - ] = mock_object + ] = mock_rpc request = {} await client.list_products_in_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products_in_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8611,8 +8628,9 @@ def test_import_product_sets_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_product_sets(request) @@ -8668,26 +8686,28 @@ async def test_import_product_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_product_sets - ] = mock_object + ] = mock_rpc request = {} await client.import_product_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_product_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9022,8 +9042,9 @@ def test_purge_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_products(request) @@ -9077,26 +9098,28 @@ async def test_purge_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_products - ] = mock_object + ] = mock_rpc request = {} await client.purge_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p1beta1/test_image_annotator.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p1beta1/test_image_annotator.py index 797dc8d11125..f83b0758b0fd 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p1beta1/test_image_annotator.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p1beta1/test_image_annotator.py @@ -1288,22 +1288,23 @@ async def test_batch_annotate_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_annotate_images - ] = mock_object + ] = mock_rpc request = {} await client.batch_annotate_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_annotate_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p2beta1/test_image_annotator.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p2beta1/test_image_annotator.py index d73116ff26e2..557501537624 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p2beta1/test_image_annotator.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p2beta1/test_image_annotator.py @@ -1298,22 +1298,23 @@ async def test_batch_annotate_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_annotate_images - ] = mock_object + ] = mock_rpc request = {} await client.batch_annotate_images(request) # Establish that the underlying gRPC stub method was called. 
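
# --- Illustrative aside (sketch, not generated diff content) ------------------
# The reworded comments in the operation-method tests (async_batch_annotate_*,
# import_product_sets, purge_products) spell out that wrapper_fn is also used to
# build client._transport.operations_client on the first RPC call, after which
# the cached instance is reused. A hedged sketch of that lazy construction;
# FakeOperationsTransport and build_operations_client are illustrative names:
from unittest import mock

class FakeOperationsTransport:
    def __init__(self, build_operations_client):
        self._build = build_operations_client
        self._operations_client = None

    @property
    def operations_client(self):
        # Built lazily on the first operation RPC, then cached for reuse.
        if self._operations_client is None:
            self._operations_client = self._build()
        return self._operations_client

build = mock.Mock(return_value="operations-client")
transport = FakeOperationsTransport(build)

assert transport.operations_client == "operations-client"  # first access builds it
build.reset_mock()
assert transport.operations_client == "operations-client"  # cached: no rebuild
assert build.call_count == 0
# -------------------------------------------------------------------------------
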
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_annotate_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1580,8 +1581,9 @@ def test_async_batch_annotate_files_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.async_batch_annotate_files(request) @@ -1637,26 +1639,28 @@ async def test_async_batch_annotate_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.async_batch_annotate_files - ] = mock_object + ] = mock_rpc request = {} await client.async_batch_annotate_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.async_batch_annotate_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_image_annotator.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_image_annotator.py index 3b705fc4566b..a88658f8cd6b 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_image_annotator.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_image_annotator.py @@ -1302,22 +1302,23 @@ async def test_batch_annotate_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_annotate_images - ] = mock_object + ] = mock_rpc request = {} await client.batch_annotate_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_annotate_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1584,8 +1585,9 @@ def test_async_batch_annotate_files_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.async_batch_annotate_files(request) @@ -1641,26 +1643,28 @@ async def test_async_batch_annotate_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.async_batch_annotate_files - ] = mock_object + ] = mock_rpc request = {} await client.async_batch_annotate_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.async_batch_annotate_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py index 82518408af45..c46e49720881 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p3beta1/test_product_search.py @@ -1311,22 +1311,23 @@ async def test_create_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product_set - ] = mock_object + ] = mock_rpc request = {} await client.create_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1720,22 +1721,23 @@ async def test_list_product_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_product_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_product_sets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_product_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2300,22 +2302,23 @@ async def test_get_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product_set - ] = mock_object + ] = mock_rpc request = {} await client.get_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2678,22 +2681,23 @@ async def test_update_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product_set - ] = mock_object + ] = mock_rpc request = {} await client.update_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3070,22 +3074,23 @@ async def test_delete_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3447,22 +3452,23 @@ async def test_create_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product - ] = mock_object + ] = mock_rpc request = {} await client.create_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3840,22 +3846,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4408,22 +4415,23 @@ async def test_get_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product - ] = mock_object + ] = mock_rpc request = {} await client.get_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4784,22 +4792,23 @@ async def test_update_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product - ] = mock_object + ] = mock_rpc request = {} await client.update_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5158,22 +5167,23 @@ async def test_delete_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product - ] = mock_object + ] = mock_rpc request = {} await client.delete_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5532,22 +5542,23 @@ async def test_create_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.create_reference_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5935,22 +5946,23 @@ async def test_delete_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.delete_reference_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6319,22 +6331,23 @@ async def test_list_reference_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reference_images - ] = mock_object + ] = mock_rpc request = {} await client.list_reference_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reference_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6917,22 +6930,23 @@ async def test_get_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.get_reference_image(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7302,22 +7316,23 @@ async def test_add_product_to_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_product_to_product_set - ] = mock_object + ] = mock_rpc request = {} await client.add_product_to_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_product_to_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7686,22 +7701,23 @@ async def test_remove_product_from_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_product_from_product_set - ] = mock_object + ] = mock_rpc request = {} await client.remove_product_from_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_product_from_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8077,22 +8093,23 @@ async def test_list_products_in_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products_in_product_set - ] = mock_object + ] = mock_rpc request = {} await client.list_products_in_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products_in_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8610,8 +8627,9 @@ def test_import_product_sets_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_product_sets(request) @@ -8667,26 +8685,28 @@ async def test_import_product_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_product_sets - ] = mock_object + ] = mock_rpc request = {} await client.import_product_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_product_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_image_annotator.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_image_annotator.py index dbfc9959d6ae..c86d585093ee 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_image_annotator.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_image_annotator.py @@ -1303,22 +1303,23 @@ async def test_batch_annotate_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_annotate_images - ] = mock_object + ] = mock_rpc request = {} await client.batch_annotate_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_annotate_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1637,22 +1638,23 @@ async def test_batch_annotate_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_annotate_files - ] = mock_object + ] = mock_rpc request = {} await client.batch_annotate_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_annotate_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1931,8 +1933,9 @@ def test_async_batch_annotate_images_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.async_batch_annotate_images(request) @@ -1988,26 +1991,28 @@ async def test_async_batch_annotate_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.async_batch_annotate_images - ] = mock_object + ] = mock_rpc request = {} await client.async_batch_annotate_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.async_batch_annotate_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2296,8 +2301,9 @@ def test_async_batch_annotate_files_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.async_batch_annotate_files(request) @@ -2353,26 +2359,28 @@ async def test_async_batch_annotate_files_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.async_batch_annotate_files - ] = mock_object + ] = mock_rpc request = {} await client.async_batch_annotate_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.async_batch_annotate_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py index befcc8e8810e..42672daf0af0 100644 --- a/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py +++ b/packages/google-cloud-vision/tests/unit/gapic/vision_v1p4beta1/test_product_search.py @@ -1312,22 +1312,23 @@ async def test_create_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product_set - ] = mock_object + ] = mock_rpc request = {} await client.create_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1721,22 +1722,23 @@ async def test_list_product_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_product_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_product_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_product_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2301,22 +2303,23 @@ async def test_get_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product_set - ] = mock_object + ] = mock_rpc request = {} await client.get_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,22 +2682,23 @@ async def test_update_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product_set - ] = mock_object + ] = mock_rpc request = {} await client.update_product_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3071,22 +3075,23 @@ async def test_delete_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3448,22 +3453,23 @@ async def test_create_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_product - ] = mock_object + ] = mock_rpc request = {} await client.create_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3841,22 +3847,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4409,22 +4416,23 @@ async def test_get_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product - ] = mock_object + ] = mock_rpc request = {} await client.get_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4785,22 +4793,23 @@ async def test_update_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_product - ] = mock_object + ] = mock_rpc request = {} await client.update_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5159,22 +5168,23 @@ async def test_delete_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product - ] = mock_object + ] = mock_rpc request = {} await client.delete_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5533,22 +5543,23 @@ async def test_create_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.create_reference_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5936,22 +5947,23 @@ async def test_delete_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.delete_reference_image(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6320,22 +6332,23 @@ async def test_list_reference_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reference_images - ] = mock_object + ] = mock_rpc request = {} await client.list_reference_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reference_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6918,22 +6931,23 @@ async def test_get_reference_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_reference_image - ] = mock_object + ] = mock_rpc request = {} await client.get_reference_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_reference_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7303,22 +7317,23 @@ async def test_add_product_to_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_product_to_product_set - ] = mock_object + ] = mock_rpc request = {} await client.add_product_to_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_product_to_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7687,22 +7702,23 @@ async def test_remove_product_from_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_product_from_product_set - ] = mock_object + ] = mock_rpc request = {} await client.remove_product_from_product_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_product_from_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8078,22 +8094,23 @@ async def test_list_products_in_product_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products_in_product_set - ] = mock_object + ] = mock_rpc request = {} await client.list_products_in_product_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products_in_product_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8611,8 +8628,9 @@ def test_import_product_sets_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_product_sets(request) @@ -8668,26 +8686,28 @@ async def test_import_product_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_product_sets - ] = mock_object + ] = mock_rpc request = {} await client.import_product_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_product_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9022,8 +9042,9 @@ def test_purge_products_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.purge_products(request) @@ -9077,26 +9098,28 @@ async def test_purge_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.purge_products - ] = mock_object + ] = mock_rpc request = {} await client.purge_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.purge_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/async_client.py index 989f54b4c8d5..895083a29697 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppPlatformClient).get_transport_class, type(AppPlatformClient) - ) + get_transport_class = AppPlatformClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/client.py index 229610e6407e..af6d47d17d07 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/app_platform/client.py @@ -761,7 +761,7 @@ def __init__( transport_init: Union[ Type[AppPlatformTransport], Callable[..., AppPlatformTransport] ] = ( - type(self).get_transport_class(transport) + AppPlatformClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppPlatformTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/async_client.py index 71b5a3176c29..9b9faf01cd80 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(HealthCheckServiceClient).get_transport_class, - type(HealthCheckServiceClient), - ) + get_transport_class = HealthCheckServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/client.py index 593a16f45cf1..d346adf5ff00 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/health_check_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[HealthCheckServiceTransport], Callable[..., HealthCheckServiceTransport], ] = ( - type(self).get_transport_class(transport) + HealthCheckServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., HealthCheckServiceTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/async_client.py index 1b16194aef1a..055d3342acc4 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,10 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LiveVideoAnalyticsClient).get_transport_class, - type(LiveVideoAnalyticsClient), - ) + get_transport_class = LiveVideoAnalyticsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/client.py index 1f257b65a1c2..04f05f33b819 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/live_video_analytics/client.py @@ -748,7 +748,7 @@ def __init__( Type[LiveVideoAnalyticsTransport], Callable[..., LiveVideoAnalyticsTransport], ] = ( - type(self).get_transport_class(transport) + LiveVideoAnalyticsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LiveVideoAnalyticsTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/async_client.py index 36043dff4811..e3b6a178a6a1 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StreamingServiceClient).get_transport_class, type(StreamingServiceClient) - ) + get_transport_class = StreamingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/client.py index d2f8d1094aaa..3e7e0b47a934 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streaming_service/client.py @@ -672,7 +672,7 @@ def __init__( Type[StreamingServiceTransport], Callable[..., StreamingServiceTransport], ] = ( - type(self).get_transport_class(transport) + StreamingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StreamingServiceTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/async_client.py index 5e2006bc0f01..d8e8b72c8d2a 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,9 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StreamsServiceClient).get_transport_class, type(StreamsServiceClient) - ) + get_transport_class = StreamsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/client.py index 82b230acc5e0..9e91073a4967 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/streams_service/client.py @@ -772,7 +772,7 @@ def __init__( transport_init: Union[ Type[StreamsServiceTransport], Callable[..., StreamsServiceTransport] ] = ( - type(self).get_transport_class(transport) + StreamsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StreamsServiceTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py index 0d840b6c2e03..e33e38726d0e 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -213,9 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WarehouseClient).get_transport_class, type(WarehouseClient) - ) + get_transport_class = WarehouseClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py index fb665c6bd9e4..a7b0f4fa4c78 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py @@ -860,7 +860,7 @@ def __init__( transport_init: Union[ Type[WarehouseTransport], Callable[..., WarehouseTransport] ] = ( - type(self).get_transport_class(transport) + WarehouseClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WarehouseTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py index 92d0767e5f12..4a45cb1dcc3a 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py @@ -3434,7 +3434,6 @@ class VertexCustomConfig(proto.Message): stored in the annotations field. - To drop the prediction output, simply clear the payload field in the returned AppPlatformCloudFunctionResponse. 
- attach_application_metadata (bool): If true, the prediction request received by custom model will also contain metadata with the @@ -3447,7 +3446,7 @@ class VertexCustomConfig(proto.Message): 'node': STRING; 'processor': STRING; - } + } dynamic_config_input_topic (str): Optional. By setting the configuration_input_topic, @@ -3463,7 +3462,6 @@ class VertexCustomConfig(proto.Message): fps set inside the topic. int32 fps = 2; } This field is a member of `oneof`_ ``_dynamic_config_input_topic``. - """ max_prediction_fps: int = proto.Field( diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/async_client.py index a35840972e47..a8cd710cd2d7 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppPlatformClient).get_transport_class, type(AppPlatformClient) - ) + get_transport_class = AppPlatformClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/client.py index c8f4480135a5..aa8f7f60fe9f 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/app_platform/client.py @@ -761,7 +761,7 @@ def __init__( transport_init: Union[ Type[AppPlatformTransport], Callable[..., AppPlatformTransport] ] = ( - type(self).get_transport_class(transport) + AppPlatformClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppPlatformTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/async_client.py index 59bcfe251b42..c73657e73f05 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,10 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LiveVideoAnalyticsClient).get_transport_class, - type(LiveVideoAnalyticsClient), - ) + get_transport_class = LiveVideoAnalyticsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/client.py index c68acb45d6f4..20cd4a34fd6d 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/live_video_analytics/client.py @@ -702,7 +702,7 @@ def __init__( Type[LiveVideoAnalyticsTransport], Callable[..., LiveVideoAnalyticsTransport], ] = ( - type(self).get_transport_class(transport) + LiveVideoAnalyticsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LiveVideoAnalyticsTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/async_client.py index 20f76f7efad3..58769195c2c6 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StreamingServiceClient).get_transport_class, type(StreamingServiceClient) - ) + get_transport_class = StreamingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/client.py index a5bb0dbf90ac..134e92a307e3 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streaming_service/client.py @@ -672,7 +672,7 @@ def __init__( Type[StreamingServiceTransport], Callable[..., StreamingServiceTransport], ] = ( - type(self).get_transport_class(transport) + StreamingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StreamingServiceTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/async_client.py index e7d14c7850af..d077ee2bbdb0 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(StreamsServiceClient).get_transport_class, type(StreamsServiceClient) - ) + get_transport_class = StreamsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/client.py index 4c7679105c44..d676315a1d7a 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/streams_service/client.py @@ -776,7 +776,7 @@ def __init__( transport_init: Union[ Type[StreamsServiceTransport], Callable[..., StreamsServiceTransport] ] = ( - type(self).get_transport_class(transport) + StreamsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StreamsServiceTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/async_client.py index 8aa862d3d3b1..1fa7c8349ebe 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WarehouseClient).get_transport_class, type(WarehouseClient) - ) + get_transport_class = WarehouseClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/client.py index 96be89a0474a..ccdc072df67d 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/services/warehouse/client.py @@ -765,7 +765,7 @@ def __init__( transport_init: Union[ Type[WarehouseTransport], Callable[..., WarehouseTransport] ] = ( - type(self).get_transport_class(transport) + WarehouseClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WarehouseTransport], transport) ) diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py index 45fc1588a825..fd8a8b002e97 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py @@ -3242,21 +3242,18 @@ class VertexCustomConfig(proto.Message): stored in the annotations field. - To drop the prediction output, simply clear the payload field in the returned AppPlatformCloudFunctionResponse. 
- attach_application_metadata (bool): If true, the prediction request received by custom model will also contain metadata with the following schema: 'appPlatformMetadata': { - 'ingestionTime': DOUBLE; (UNIX timestamp) - 'application': STRING; - 'instanceId': STRING; - 'node': STRING; - 'processor': STRING; - - } - + 'ingestionTime': DOUBLE; (UNIX timestamp) + 'application': STRING; + 'instanceId': STRING; + 'node': STRING; + 'processor': STRING; + } """ max_prediction_fps: int = proto.Field( diff --git a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json index a9e3c20cb5ca..db06e43868dd 100644 --- a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json +++ b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-visionai", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json index 841a855dbc85..f7d1e3f46ede 100644 --- a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json +++ b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-visionai", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py index d4ae069da786..83c5a79f945b 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_app_platform.py @@ -1282,22 +1282,23 @@ async def test_list_applications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_applications - ] = mock_object + ] = mock_rpc request = {} await client.list_applications(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_applications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1872,22 +1873,23 @@ async def test_get_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_application - ] = mock_object + ] = mock_rpc request = {} await client.get_application(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2202,8 +2204,9 @@ def test_create_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application(request) @@ -2259,26 +2262,28 @@ async def test_create_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application - ] = mock_object + ] = mock_rpc request = {} await client.create_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2598,8 +2603,9 @@ def test_update_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application(request) @@ -2655,26 +2661,28 @@ async def test_update_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application - ] = mock_object + ] = mock_rpc request = {} await client.update_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2996,8 +3004,9 @@ def test_delete_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_application(request) @@ -3053,26 +3062,28 @@ async def test_delete_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_application - ] = mock_object + ] = mock_rpc request = {} await client.delete_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3384,8 +3395,9 @@ def test_deploy_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_application(request) @@ -3441,26 +3453,28 @@ async def test_deploy_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_application - ] = mock_object + ] = mock_rpc request = {} await client.deploy_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3772,8 +3786,9 @@ def test_undeploy_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_application(request) @@ -3829,26 +3844,28 @@ async def test_undeploy_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_application - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4161,8 +4178,9 @@ def test_add_application_stream_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_application_stream_input(request) @@ -4218,26 +4236,28 @@ async def test_add_application_stream_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_application_stream_input - ] = mock_object + ] = mock_rpc request = {} await client.add_application_stream_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_application_stream_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4551,8 +4571,9 @@ def test_remove_application_stream_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_application_stream_input(request) @@ -4608,26 +4629,28 @@ async def test_remove_application_stream_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_application_stream_input - ] = mock_object + ] = mock_rpc request = {} await client.remove_application_stream_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_application_stream_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4941,8 +4964,9 @@ def test_update_application_stream_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application_stream_input(request) @@ -4998,26 +5022,28 @@ async def test_update_application_stream_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application_stream_input - ] = mock_object + ] = mock_rpc request = {} await client.update_application_stream_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application_stream_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5383,22 +5409,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5955,22 +5982,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6280,8 +6308,9 @@ def test_create_application_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application_instances(request) @@ -6337,26 +6366,28 @@ async def test_create_application_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application_instances - ] = mock_object + ] = mock_rpc request = {} await client.create_application_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6670,8 +6701,9 @@ def test_delete_application_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_application_instances(request) @@ -6727,26 +6759,28 @@ async def test_delete_application_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_application_instances - ] = mock_object + ] = mock_rpc request = {} await client.delete_application_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_application_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7060,8 +7094,9 @@ def test_update_application_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application_instances(request) @@ -7117,26 +7152,28 @@ async def test_update_application_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application_instances - ] = mock_object + ] = mock_rpc request = {} await client.update_application_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7536,22 +7573,23 @@ async def test_list_drafts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_drafts - ] = mock_object + ] = mock_rpc request = {} await client.list_drafts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_drafts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8100,22 +8138,23 @@ async def test_get_draft_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_draft - ] = mock_object + ] = mock_rpc request = {} await client.get_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8412,8 +8451,9 @@ def test_create_draft_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_draft(request) @@ -8467,26 +8507,28 @@ async def test_create_draft_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_draft - ] = mock_object + ] = mock_rpc request = {} await client.create_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8796,8 +8838,9 @@ def test_update_draft_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_draft(request) @@ -8851,26 +8894,28 @@ async def test_update_draft_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_draft - ] = mock_object + ] = mock_rpc request = {} await client.update_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9172,8 +9217,9 @@ def test_delete_draft_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_draft(request) @@ -9227,26 +9273,28 @@ async def test_delete_draft_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_draft - ] = mock_object + ] = mock_rpc request = {} await client.delete_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9601,22 +9649,23 @@ async def test_list_processors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processors - ] = mock_object + ] = mock_rpc request = {} await client.list_processors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10169,22 +10218,23 @@ async def test_list_prebuilt_processors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_prebuilt_processors - ] = mock_object + ] = mock_rpc request = {} await client.list_prebuilt_processors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_prebuilt_processors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10579,22 +10629,23 @@ async def test_get_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor - ] = mock_object + ] = mock_rpc request = {} await client.get_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10915,8 +10966,9 @@ def test_create_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_processor(request) @@ -10970,26 +11022,28 @@ async def test_create_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_processor - ] = mock_object + ] = mock_rpc request = {} await client.create_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11301,8 +11355,9 @@ def test_update_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_processor(request) @@ -11356,26 +11411,28 @@ async def test_update_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_processor - ] = mock_object + ] = mock_rpc request = {} await client.update_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11679,8 +11736,9 @@ def test_delete_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_processor(request) @@ -11734,26 +11792,28 @@ async def test_delete_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_processor - ] = mock_object + ] = mock_rpc request = {} await client.delete_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_health_check_service.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_health_check_service.py index c3245c8e8462..fdda1734c5f6 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_health_check_service.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_health_check_service.py @@ -1330,22 +1330,23 @@ async def test_health_check_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.health_check - ] = mock_object + ] = mock_rpc request = {} await client.health_check(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.health_check(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py index 9e794ae7c87a..6e0bc6c68708 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_live_video_analytics.py @@ -1359,22 +1359,23 @@ async def test_list_public_operators_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_public_operators - ] = mock_object + ] = mock_rpc request = {} await client.list_public_operators(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_public_operators(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1943,22 +1944,23 @@ async def test_resolve_operator_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resolve_operator_info - ] = mock_object + ] = mock_rpc request = {} await client.resolve_operator_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resolve_operator_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2333,22 +2335,23 @@ async def test_list_operators_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_operators - ] = mock_object + ] = mock_rpc request = {} await client.list_operators(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_operators(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2896,22 +2899,23 @@ async def test_get_operator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_operator - ] = mock_object + ] = mock_rpc request = {} await client.get_operator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_operator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3210,8 +3214,9 @@ def test_create_operator_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_operator(request) @@ -3265,26 +3270,28 @@ async def test_create_operator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_operator - ] = mock_object + ] = mock_rpc request = {} await client.create_operator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_operator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3594,8 +3601,9 @@ def test_update_operator_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_operator(request) @@ -3649,26 +3657,28 @@ async def test_update_operator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_operator - ] = mock_object + ] = mock_rpc request = {} await client.update_operator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_operator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3970,8 +3980,9 @@ def test_delete_operator_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_operator(request) @@ -4025,26 +4036,28 @@ async def test_delete_operator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_operator - ] = mock_object + ] = mock_rpc request = {} await client.delete_operator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_operator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4399,22 +4412,23 @@ async def test_list_analyses_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_analyses - ] = mock_object + ] = mock_rpc request = {} await client.list_analyses(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_analyses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4962,22 +4976,23 @@ async def test_get_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_analysis - ] = mock_object + ] = mock_rpc request = {} await client.get_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5276,8 +5291,9 @@ def test_create_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_analysis(request) @@ -5331,26 +5347,28 @@ async def test_create_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_analysis - ] = mock_object + ] = mock_rpc request = {} await client.create_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5660,8 +5678,9 @@ def test_update_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_analysis(request) @@ -5715,26 +5734,28 @@ async def test_update_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_analysis - ] = mock_object + ] = mock_rpc request = {} await client.update_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6036,8 +6057,9 @@ def test_delete_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_analysis(request) @@ -6091,26 +6113,28 @@ async def test_delete_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_analysis - ] = mock_object + ] = mock_rpc request = {} await client.delete_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6465,22 +6489,23 @@ async def test_list_processes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processes - ] = mock_object + ] = mock_rpc request = {} await client.list_processes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7043,22 +7068,23 @@ async def test_get_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_process - ] = mock_object + ] = mock_rpc request = {} await client.get_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7367,8 +7393,9 @@ def test_create_process_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_process(request) @@ -7422,26 +7449,28 @@ async def test_create_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_process - ] = mock_object + ] = mock_rpc request = {} await client.create_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7751,8 +7780,9 @@ def test_update_process_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_process(request) @@ -7806,26 +7836,28 @@ async def test_update_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_process - ] = mock_object + ] = mock_rpc request = {} await client.update_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8127,8 +8159,9 @@ def test_delete_process_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_process(request) @@ -8182,26 +8215,28 @@ async def test_delete_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_process - ] = mock_object + ] = mock_rpc request = {} await client.delete_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8501,8 +8536,9 @@ def test_batch_run_process_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_run_process(request) @@ -8558,26 +8594,28 @@ async def test_batch_run_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_run_process - ] = mock_object + ] = mock_rpc request = {} await client.batch_run_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_run_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streaming_service.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streaming_service.py index 6445f7eba4de..0f09b40ccea4 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streaming_service.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streaming_service.py @@ -1246,22 +1246,23 @@ async def test_send_packets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.send_packets - ] = mock_object + ] = mock_rpc request = [{}] await client.send_packets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.send_packets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1394,22 +1395,23 @@ async def test_receive_packets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.receive_packets - ] = mock_object + ] = mock_rpc request = [{}] await client.receive_packets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.receive_packets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1543,22 +1545,23 @@ async def test_receive_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.receive_events - ] = mock_object + ] = mock_rpc request = [{}] await client.receive_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.receive_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1774,22 +1777,23 @@ async def test_acquire_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acquire_lease - ] = mock_object + ] = mock_rpc request = {} await client.acquire_lease(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acquire_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2075,22 +2079,23 @@ async def test_renew_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.renew_lease - ] = mock_object + ] = mock_rpc request = {} await client.renew_lease(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.renew_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2362,22 +2367,23 @@ async def test_release_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.release_lease - ] = mock_object + ] = mock_rpc request = {} await client.release_lease(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.release_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py index 1a5374f0337f..7ac37020a036 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_streams_service.py @@ -1308,22 +1308,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1877,22 +1878,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2191,8 +2193,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2246,26 +2249,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2575,8 +2580,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2630,26 +2636,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2951,8 +2959,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3006,26 +3015,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3380,22 +3391,23 @@ async def test_list_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3947,22 +3959,23 @@ async def test_get_stream_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4265,8 +4278,9 @@ def test_create_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_stream(request) @@ -4320,26 +4334,28 @@ async def test_create_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4649,8 +4665,9 @@ def test_update_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_stream(request) @@ -4704,26 +4721,28 @@ async def test_update_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5025,8 +5044,9 @@ def test_delete_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_stream(request) @@ -5080,26 +5100,28 @@ async def test_delete_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5405,8 +5427,9 @@ def test_get_stream_thumbnail_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.get_stream_thumbnail(request) @@ -5462,26 +5485,28 @@ async def test_get_stream_thumbnail_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream_thumbnail - ] = mock_object + ] = mock_rpc request = {} await client.get_stream_thumbnail(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.get_stream_thumbnail(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5861,22 +5886,23 @@ async def test_generate_stream_hls_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_stream_hls_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_stream_hls_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_stream_hls_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6245,22 +6271,23 @@ async def test_list_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_events - ] = mock_object + ] = mock_rpc request = {} await client.list_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6806,22 +6833,23 @@ async def test_get_event_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event - ] = mock_object + ] = mock_rpc request = {} await client.get_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7120,8 +7148,9 @@ def test_create_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_event(request) @@ -7175,26 +7204,28 @@ async def test_create_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_event - ] = mock_object + ] = mock_rpc request = {} await client.create_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7504,8 +7535,9 @@ def test_update_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_event(request) @@ -7559,26 +7591,28 @@ async def test_update_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_event - ] = mock_object + ] = mock_rpc request = {} await client.update_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7880,8 +7914,9 @@ def test_delete_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_event(request) @@ -7935,26 +7970,28 @@ async def test_delete_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8309,22 +8346,23 @@ async def test_list_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_series - ] = mock_object + ] = mock_rpc request = {} await client.list_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8873,22 +8911,23 @@ async def test_get_series_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_series - ] = mock_object + ] = mock_rpc request = {} await client.get_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9189,8 +9228,9 @@ def test_create_series_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_series(request) @@ -9244,26 +9284,28 @@ async def test_create_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_series - ] = mock_object + ] = mock_rpc request = {} await client.create_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9573,8 +9615,9 @@ def test_update_series_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_series(request) @@ -9628,26 +9671,28 @@ async def test_update_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_series - ] = mock_object + ] = mock_rpc request = {} await client.update_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9949,8 +9994,9 @@ def test_delete_series_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_series(request) @@ -10004,26 +10050,28 @@ async def test_delete_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_series - ] = mock_object + ] = mock_rpc request = {} await client.delete_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10327,8 +10375,9 @@ def test_materialize_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.materialize_channel(request) @@ -10384,26 +10433,28 @@ async def test_materialize_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.materialize_channel - ] = mock_object + ] = mock_rpc request = {} await client.materialize_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.materialize_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py index 5dcbacd114f9..0b6e0f0a95cc 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py @@ -1259,22 +1259,23 @@ async def test_create_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_asset - ] = mock_object + ] = mock_rpc request = {} await client.create_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1635,22 +1636,23 @@ async def test_update_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_asset - ] = mock_object + ] = mock_rpc request = {} await client.update_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2003,22 +2005,23 @@ async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_asset - ] = mock_object + ] = mock_rpc request = {} await client.get_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2367,22 +2370,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2869,8 +2873,9 @@ def test_delete_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_asset(request) @@ -2924,26 +2929,28 @@ async def test_delete_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_asset - ] = mock_object + ] = mock_rpc request = {} await client.delete_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3233,8 +3240,9 @@ def test_upload_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upload_asset(request) @@ -3288,26 +3296,28 @@ async def test_upload_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upload_asset - ] = mock_object + ] = mock_rpc request = {} await client.upload_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upload_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3584,22 +3594,23 @@ async def test_generate_retrieval_url_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_retrieval_url - ] = mock_object + ] = mock_rpc request = {} await client.generate_retrieval_url(https://github.com/request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_retrieval_url(https://github.com/request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3816,8 +3827,9 @@ def test_analyze_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.analyze_asset(request) @@ -3871,26 +3883,28 @@ async def test_analyze_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_asset - ] = mock_object + ] = mock_rpc request = {} await client.analyze_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.analyze_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4100,8 +4114,9 @@ def test_index_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.index_asset(request) @@ -4155,26 +4170,28 @@ async def test_index_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.index_asset - ] = mock_object + ] = mock_rpc request = {} await client.index_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.index_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4394,8 +4411,9 @@ def test_remove_index_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_index_asset(request) @@ -4451,26 +4469,28 @@ async def test_remove_index_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_index_asset - ] = mock_object + ] = mock_rpc request = {} await client.remove_index_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_index_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4756,22 +4776,23 @@ async def test_view_indexed_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.view_indexed_assets - ] = mock_object + ] = mock_rpc request = {} await client.view_indexed_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.view_indexed_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5278,8 +5299,9 @@ def test_create_index_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_index(request) @@ -5333,26 +5355,28 @@ async def test_create_index_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_index - ] = mock_object + ] = mock_rpc request = {} await client.create_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5658,8 +5682,9 @@ def test_update_index_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_index(request) @@ -5713,26 +5738,28 @@ async def test_update_index_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_index - ] = mock_object + ] = mock_rpc request = {} await client.update_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6096,22 +6123,23 @@ async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_index - ] = mock_object + ] = mock_rpc request = {} await client.get_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6464,22 +6492,23 @@ async def test_list_indexes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_indexes - ] = mock_object + ] = mock_rpc request = {} await client.list_indexes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_indexes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6966,8 +6995,9 @@ def test_delete_index_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_index(request) @@ -7021,26 +7051,28 @@ async def test_delete_index_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_index - ] = mock_object + ] = mock_rpc request = {} await client.delete_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7330,8 +7362,9 @@ def test_create_corpus_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_corpus(request) @@ -7385,26 +7418,28 @@ async def test_create_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_corpus - ] = mock_object + ] = mock_rpc request = {} await client.create_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7767,22 +7802,23 @@ async def test_get_corpus_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_corpus - ] = mock_object + ] = mock_rpc request = {} await client.get_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8138,22 +8174,23 @@ async def test_update_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_corpus - ] = mock_object + ] = mock_rpc request = {} await client.update_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8518,22 +8555,23 @@ async def test_list_corpora_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_corpora - ] = mock_object + ] = mock_rpc request = {} await client.list_corpora(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_corpora(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9069,22 +9107,23 @@ async def test_delete_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_corpus - ] = mock_object + ] = mock_rpc request = {} await client.delete_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9368,8 +9407,9 @@ def test_analyze_corpus_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.analyze_corpus(request) @@ -9423,26 +9463,28 @@ async def test_analyze_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_corpus - ] = mock_object + ] = mock_rpc request = {} await client.analyze_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.analyze_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9721,22 +9763,23 @@ async def test_create_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_data_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10118,22 +10161,23 @@ async def test_update_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_data_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10507,22 +10551,23 @@ async def test_get_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_data_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10878,22 +10923,23 @@ async def test_delete_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11255,22 +11301,23 @@ async def test_list_data_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_data_schemas(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11843,22 +11890,23 @@ async def test_create_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_annotation - ] = mock_object + ] = mock_rpc request = {} await client.create_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12237,22 +12285,23 @@ async def test_get_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12607,22 +12656,23 @@ async def test_list_annotations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_annotations - ] = mock_object + ] = mock_rpc request = {} await client.list_annotations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_annotations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13171,22 +13221,23 @@ async def test_update_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_annotation - ] = mock_object + ] = mock_rpc request = {} await client.update_annotation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13558,22 +13609,23 @@ async def test_delete_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_annotation - ] = mock_object + ] = mock_rpc request = {} await client.delete_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13851,22 +13903,23 @@ async def test_ingest_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.ingest_asset - ] = mock_object + ] = mock_rpc request = [{}] await client.ingest_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.ingest_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14064,22 +14117,23 @@ async def test_clip_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.clip_asset - ] = mock_object + ] = mock_rpc request = {} await client.clip_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.clip_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14345,22 +14399,23 @@ async def test_generate_hls_uri_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_hls_uri - ] = mock_object + ] = mock_rpc request = {} await client.generate_hls_uri(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_hls_uri(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14573,8 +14628,9 @@ def test_import_assets_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_assets(request) @@ -14628,26 +14684,28 @@ async def test_import_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_assets - ] = mock_object + ] = mock_rpc request = {} await client.import_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14925,22 +14983,23 @@ async def test_create_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_search_config - ] = mock_object + ] = mock_rpc request = {} await client.create_search_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15327,22 +15386,23 @@ async def test_update_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_search_config - ] = mock_object + ] = mock_rpc request = {} await client.update_search_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15721,22 +15781,23 @@ async def test_get_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_search_config - ] = mock_object + ] = mock_rpc request = {} await client.get_search_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16100,22 +16161,23 @@ async def test_delete_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_search_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_search_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16479,22 +16541,23 @@ async def test_list_search_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_search_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_search_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_search_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17076,22 +17139,23 @@ async def test_create_search_hypernym_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_search_hypernym - ] = mock_object + ] = mock_rpc request = {} await client.create_search_hypernym(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_search_hypernym(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17489,22 +17553,23 @@ async def test_update_search_hypernym_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_search_hypernym - ] = mock_object + ] = mock_rpc request = {} await client.update_search_hypernym(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_search_hypernym(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17895,22 +17960,23 @@ async def test_get_search_hypernym_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_search_hypernym - ] = mock_object + ] = mock_rpc request = {} await client.get_search_hypernym(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_search_hypernym(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18279,22 +18345,23 @@ async def test_delete_search_hypernym_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_search_hypernym - ] = mock_object + ] = mock_rpc request = {} await client.delete_search_hypernym(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_search_hypernym(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18659,22 +18726,23 @@ async def test_list_search_hypernyms_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_search_hypernyms - ] = mock_object + ] = mock_rpc request = {} await client.list_search_hypernyms(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_search_hypernyms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19239,22 +19307,23 @@ async def test_search_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_assets - ] = mock_object + ] = mock_rpc request = {} await client.search_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19732,22 +19801,23 @@ async def test_search_index_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_index_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.search_index_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_index_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20179,8 +20249,9 @@ def test_create_index_endpoint_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_index_endpoint(request) @@ -20236,26 +20307,28 @@ async def test_create_index_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_index_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.create_index_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_index_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20652,22 +20725,23 @@ async def test_get_index_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_index_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.get_index_endpoint(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_index_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21048,22 +21122,23 @@ async def test_list_index_endpoints_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_index_endpoints - ] = mock_object + ] = mock_rpc request = {} await client.list_index_endpoints(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_index_endpoints(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21575,8 +21650,9 @@ def test_update_index_endpoint_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_index_endpoint(request) @@ -21632,26 +21708,28 @@ async def test_update_index_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_index_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.update_index_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_index_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21972,8 +22050,9 @@ def test_delete_index_endpoint_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_index_endpoint(request) @@ -22029,26 +22108,28 @@ async def test_delete_index_endpoint_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_index_endpoint - ] = mock_object + ] = mock_rpc request = {} await client.delete_index_endpoint(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_index_endpoint(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22348,8 +22429,9 @@ def test_deploy_index_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_index(request) @@ -22403,26 +22485,28 @@ async def test_deploy_index_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_index - ] = mock_object + ] = mock_rpc request = {} await client.deploy_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22630,8 +22714,9 @@ def test_undeploy_index_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_index(request) @@ -22685,26 +22770,28 @@ async def test_undeploy_index_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_index - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22922,8 +23009,9 @@ def test_create_collection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_collection(request) @@ -22979,26 +23067,28 @@ async def test_create_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_collection - ] = mock_object + ] = mock_rpc request = {} await client.create_collection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23326,8 +23416,9 @@ def test_delete_collection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_collection(request) @@ -23383,26 +23474,28 @@ async def test_delete_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_collection - ] = mock_object + ] = mock_rpc request = {} await client.delete_collection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23764,22 +23857,23 @@ async def test_get_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_collection - ] = mock_object + ] = mock_rpc request = {} await client.get_collection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24144,22 +24238,23 @@ async def test_update_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_collection - ] = mock_object + ] = mock_rpc request = {} await client.update_collection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24536,22 +24631,23 @@ async def test_list_collections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_collections - ] = mock_object + ] = mock_rpc request = {} await client.list_collections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_collections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25097,22 +25193,23 @@ async def test_add_collection_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_collection_item - ] = mock_object + ] = mock_rpc request = {} await client.add_collection_item(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.add_collection_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25472,22 +25569,23 @@ async def test_remove_collection_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_collection_item - ] = mock_object + ] = mock_rpc request = {} await client.remove_collection_item(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_collection_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25858,22 +25956,23 @@ async def test_view_collection_items_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.view_collection_items - ] = mock_object + ] = mock_rpc request = {} await client.view_collection_items(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.view_collection_items(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py index b8227dbb0dcc..3c2378f22b15 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_app_platform.py @@ -1281,22 +1281,23 @@ async def test_list_applications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_applications - ] = mock_object + ] = mock_rpc request = {} await client.list_applications(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_applications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1868,22 +1869,23 @@ async def test_get_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_application - ] = mock_object + ] = mock_rpc request = {} await client.get_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2196,8 +2198,9 @@ def test_create_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application(request) @@ -2253,26 +2256,28 @@ async def test_create_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application - ] = mock_object + ] = mock_rpc request = {} await client.create_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2592,8 +2597,9 @@ def test_update_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application(request) @@ -2649,26 +2655,28 @@ async def test_update_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application - ] = mock_object + ] = mock_rpc request = {} await client.update_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2990,8 +2998,9 @@ def test_delete_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_application(request) @@ -3047,26 +3056,28 @@ async def test_delete_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_application - ] = mock_object + ] = mock_rpc request = {} await client.delete_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3378,8 +3389,9 @@ def test_deploy_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_application(request) @@ -3435,26 +3447,28 @@ async def test_deploy_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_application - ] = mock_object + ] = mock_rpc request = {} await client.deploy_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3766,8 +3780,9 @@ def test_undeploy_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_application(request) @@ -3823,26 +3838,28 @@ async def test_undeploy_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_application - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4155,8 +4172,9 @@ def test_add_application_stream_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_application_stream_input(request) @@ -4212,26 +4230,28 @@ async def test_add_application_stream_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_application_stream_input - ] = mock_object + ] = mock_rpc request = {} await client.add_application_stream_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_application_stream_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4545,8 +4565,9 @@ def test_remove_application_stream_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_application_stream_input(request) @@ -4602,26 +4623,28 @@ async def test_remove_application_stream_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_application_stream_input - ] = mock_object + ] = mock_rpc request = {} await client.remove_application_stream_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_application_stream_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4935,8 +4958,9 @@ def test_update_application_stream_input_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application_stream_input(request) @@ -4992,26 +5016,28 @@ async def test_update_application_stream_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application_stream_input - ] = mock_object + ] = mock_rpc request = {} await client.update_application_stream_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application_stream_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5377,22 +5403,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5946,22 +5973,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6269,8 +6297,9 @@ def test_create_application_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application_instances(request) @@ -6326,26 +6355,28 @@ async def test_create_application_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application_instances - ] = mock_object + ] = mock_rpc request = {} await client.create_application_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6659,8 +6690,9 @@ def test_delete_application_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_application_instances(request) @@ -6716,26 +6748,28 @@ async def test_delete_application_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_application_instances - ] = mock_object + ] = mock_rpc request = {} await client.delete_application_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_application_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7049,8 +7083,9 @@ def test_update_application_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application_instances(request) @@ -7106,26 +7141,28 @@ async def test_update_application_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application_instances - ] = mock_object + ] = mock_rpc request = {} await client.update_application_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7525,22 +7562,23 @@ async def test_list_drafts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_drafts - ] = mock_object + ] = mock_rpc request = {} await client.list_drafts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_drafts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8089,22 +8127,23 @@ async def test_get_draft_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_draft - ] = mock_object + ] = mock_rpc request = {} await client.get_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8401,8 +8440,9 @@ def test_create_draft_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_draft(request) @@ -8456,26 +8496,28 @@ async def test_create_draft_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_draft - ] = mock_object + ] = mock_rpc request = {} await client.create_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8785,8 +8827,9 @@ def test_update_draft_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_draft(request) @@ -8840,26 +8883,28 @@ async def test_update_draft_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_draft - ] = mock_object + ] = mock_rpc request = {} await client.update_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9161,8 +9206,9 @@ def test_delete_draft_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_draft(request) @@ -9216,26 +9262,28 @@ async def test_delete_draft_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_draft - ] = mock_object + ] = mock_rpc request = {} await client.delete_draft(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_draft(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9590,22 +9638,23 @@ async def test_list_processors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processors - ] = mock_object + ] = mock_rpc request = {} await client.list_processors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10158,22 +10207,23 @@ async def test_list_prebuilt_processors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_prebuilt_processors - ] = mock_object + ] = mock_rpc request = {} await client.list_prebuilt_processors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_prebuilt_processors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10559,22 +10609,23 @@ async def test_get_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_processor - ] = mock_object + ] = mock_rpc request = {} await client.get_processor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10889,8 +10940,9 @@ def test_create_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_processor(request) @@ -10944,26 +10996,28 @@ async def test_create_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_processor - ] = mock_object + ] = mock_rpc request = {} await client.create_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11275,8 +11329,9 @@ def test_update_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_processor(request) @@ -11330,26 +11385,28 @@ async def test_update_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_processor - ] = mock_object + ] = mock_rpc request = {} await client.update_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11653,8 +11710,9 @@ def test_delete_processor_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_processor(request) @@ -11708,26 +11766,28 @@ async def test_delete_processor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_processor - ] = mock_object + ] = mock_rpc request = {} await client.delete_processor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_processor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py index c596246bbc85..ac01a3755d51 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_live_video_analytics.py @@ -1349,22 +1349,23 @@ async def test_list_analyses_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_analyses - ] = mock_object + ] = mock_rpc request = {} await client.list_analyses(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_analyses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1909,22 +1910,23 @@ async def test_get_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_analysis - ] = mock_object + ] = mock_rpc request = {} await client.get_analysis(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2221,8 +2223,9 @@ def test_create_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_analysis(request) @@ -2276,26 +2279,28 @@ async def test_create_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_analysis - ] = mock_object + ] = mock_rpc request = {} await client.create_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2605,8 +2610,9 @@ def test_update_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_analysis(request) @@ -2660,26 +2666,28 @@ async def test_update_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_analysis - ] = mock_object + ] = mock_rpc request = {} await client.update_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2981,8 +2989,9 @@ def test_delete_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_analysis(request) @@ -3036,26 +3045,28 @@ async def test_delete_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_analysis - ] = mock_object + ] = mock_rpc request = {} await client.delete_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streaming_service.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streaming_service.py index 055326eb89b9..5a3a86fd5dc6 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streaming_service.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streaming_service.py @@ -1246,22 +1246,23 @@ async def test_send_packets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.send_packets - ] = mock_object + ] = mock_rpc request = [{}] await client.send_packets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.send_packets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1394,22 +1395,23 @@ async def test_receive_packets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.receive_packets - ] = mock_object + ] = mock_rpc request = [{}] await client.receive_packets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.receive_packets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1543,22 +1545,23 @@ async def test_receive_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.receive_events - ] = mock_object + ] = mock_rpc request = [{}] await client.receive_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.receive_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1774,22 +1777,23 @@ async def test_acquire_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acquire_lease - ] = mock_object + ] = mock_rpc request = {} await client.acquire_lease(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acquire_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2075,22 +2079,23 @@ async def test_renew_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.renew_lease - ] = mock_object + ] = mock_rpc request = {} await client.renew_lease(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.renew_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2362,22 +2367,23 @@ async def test_release_lease_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.release_lease - ] = mock_object + ] = mock_rpc request = {} await client.release_lease(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.release_lease(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py index 067222960647..d98db577cb1b 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_streams_service.py @@ -1312,22 +1312,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1881,22 +1882,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2195,8 +2197,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2250,26 +2253,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2579,8 +2584,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2634,26 +2640,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2955,8 +2963,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3010,26 +3019,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3384,22 +3395,23 @@ async def test_list_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3951,22 +3963,23 @@ async def test_get_stream_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4269,8 +4282,9 @@ def test_create_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_stream(request) @@ -4324,26 +4338,28 @@ async def test_create_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4653,8 +4669,9 @@ def test_update_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_stream(request) @@ -4708,26 +4725,28 @@ async def test_update_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5029,8 +5048,9 @@ def test_delete_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_stream(request) @@ -5084,26 +5104,28 @@ async def test_delete_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5462,22 +5484,23 @@ async def test_generate_stream_hls_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_stream_hls_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_stream_hls_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_stream_hls_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5846,22 +5869,23 @@ async def test_list_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_events - ] = mock_object + ] = mock_rpc request = {} await client.list_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6407,22 +6431,23 @@ async def test_get_event_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_event - ] = mock_object + ] = mock_rpc request = {} await client.get_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6721,8 +6746,9 @@ def test_create_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_event(request) @@ -6776,26 +6802,28 @@ async def test_create_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_event - ] = mock_object + ] = mock_rpc request = {} await client.create_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7105,8 +7133,9 @@ def test_update_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_event(request) @@ -7160,26 +7189,28 @@ async def test_update_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_event - ] = mock_object + ] = mock_rpc request = {} await client.update_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7481,8 +7512,9 @@ def test_delete_event_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_event(request) @@ -7536,26 +7568,28 @@ async def test_delete_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7910,22 +7944,23 @@ async def test_list_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_series - ] = mock_object + ] = mock_rpc request = {} await client.list_series(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8474,22 +8509,23 @@ async def test_get_series_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_series - ] = mock_object + ] = mock_rpc request = {} await client.get_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8790,8 +8826,9 @@ def test_create_series_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_series(request) @@ -8845,26 +8882,28 @@ async def test_create_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_series - ] = mock_object + ] = mock_rpc request = {} await client.create_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9174,8 +9213,9 @@ def test_update_series_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_series(request) @@ -9229,26 +9269,28 @@ async def test_update_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_series - ] = mock_object + ] = mock_rpc request = {} await client.update_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9550,8 +9592,9 @@ def test_delete_series_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_series(request) @@ -9605,26 +9648,28 @@ async def test_delete_series_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_series - ] = mock_object + ] = mock_rpc request = {} await client.delete_series(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_series(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9928,8 +9973,9 @@ def test_materialize_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.materialize_channel(request) @@ -9985,26 +10031,28 @@ async def test_materialize_channel_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.materialize_channel - ] = mock_object + ] = mock_rpc request = {} await client.materialize_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.materialize_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py index 346cda4b0365..532a5dfc67d6 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1alpha1/test_warehouse.py @@ -1259,22 +1259,23 @@ async def test_create_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_asset - ] = mock_object + ] = mock_rpc request = {} await client.create_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1635,22 +1636,23 @@ async def test_update_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_asset - ] = mock_object + ] = mock_rpc request = {} await client.update_asset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2003,22 +2005,23 @@ async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_asset - ] = mock_object + ] = mock_rpc request = {} await client.get_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2365,22 +2368,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2867,8 +2871,9 @@ def test_delete_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_asset(request) @@ -2922,26 +2927,28 @@ async def test_delete_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_asset - ] = mock_object + ] = mock_rpc request = {} await client.delete_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3231,8 +3238,9 @@ def test_create_corpus_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_corpus(request) @@ -3286,26 +3294,28 @@ async def test_create_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_corpus - ] = mock_object + ] = mock_rpc request = {} await client.create_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3665,22 +3675,23 @@ async def test_get_corpus_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_corpus - ] = mock_object + ] = mock_rpc request = {} await client.get_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4031,22 +4042,23 @@ async def test_update_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_corpus - ] = mock_object + ] = mock_rpc request = {} await client.update_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4407,22 +4419,23 @@ async def test_list_corpora_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_corpora - ] = mock_object + ] = mock_rpc request = {} await client.list_corpora(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_corpora(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4958,22 +4971,23 @@ async def test_delete_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_corpus - ] = mock_object + ] = mock_rpc request = {} await client.delete_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5328,22 +5342,23 @@ async def test_create_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_data_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5725,22 +5740,23 @@ async def test_update_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_data_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6114,22 +6130,23 @@ async def test_get_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_data_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6485,22 +6502,23 @@ async def test_delete_data_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6862,22 +6880,23 @@ async def test_list_data_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_data_schemas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7450,22 +7469,23 @@ async def test_create_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_annotation - ] = mock_object + ] = mock_rpc request = {} await client.create_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7844,22 +7864,23 @@ async def test_get_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8214,22 +8235,23 @@ async def test_list_annotations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_annotations - ] = mock_object + ] = mock_rpc request = {} await client.list_annotations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_annotations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8778,22 +8800,23 @@ async def test_update_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_annotation - ] = mock_object + ] = mock_rpc request = {} await client.update_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9165,22 +9188,23 @@ async def test_delete_annotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_annotation - ] = mock_object + ] = mock_rpc request = {} await client.delete_annotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_annotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9458,22 +9482,23 @@ async def test_ingest_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.ingest_asset - ] = mock_object + ] = mock_rpc request = [{}] await client.ingest_asset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.ingest_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9671,22 +9696,23 @@ async def test_clip_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.clip_asset - ] = mock_object + ] = mock_rpc request = {} await client.clip_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.clip_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9952,22 +9978,23 @@ async def test_generate_hls_uri_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_hls_uri - ] = mock_object + ] = mock_rpc request = {} await client.generate_hls_uri(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_hls_uri(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10248,22 +10275,23 @@ async def test_create_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_search_config - ] = mock_object + ] = mock_rpc request = {} await client.create_search_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10650,22 +10678,23 @@ async def test_update_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_search_config - ] = mock_object + ] = mock_rpc request = {} await client.update_search_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11044,22 +11073,23 @@ async def test_get_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_search_config - ] = mock_object + ] = mock_rpc request = {} await client.get_search_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11423,22 +11453,23 @@ async def test_delete_search_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_search_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_search_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_search_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11802,22 +11833,23 @@ async def test_list_search_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_search_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_search_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_search_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12380,22 +12412,23 @@ async def test_search_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_assets - ] = mock_object + ] = mock_rpc request = {} await client.search_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vm-migration/google/cloud/vmmigration/gapic_version.py b/packages/google-cloud-vm-migration/google/cloud/vmmigration/gapic_version.py index ab4d576b9121..558c8aab67c5 100644 --- a/packages/google-cloud-vm-migration/google/cloud/vmmigration/gapic_version.py +++ b/packages/google-cloud-vm-migration/google/cloud/vmmigration/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/gapic_version.py b/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/gapic_version.py index ab4d576b9121..558c8aab67c5 100644 --- a/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/gapic_version.py +++ b/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/async_client.py b/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/async_client.py index f4c5d3b246ed..034f30d46b96 100644 --- a/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/async_client.py +++ b/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -221,9 +220,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VmMigrationClient).get_transport_class, type(VmMigrationClient) - ) + get_transport_class = VmMigrationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/client.py b/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/client.py index b849e8b013de..59298642b01e 100644 --- a/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/client.py +++ b/packages/google-cloud-vm-migration/google/cloud/vmmigration_v1/services/vm_migration/client.py @@ -861,7 +861,7 @@ def __init__( transport_init: Union[ Type[VmMigrationTransport], Callable[..., VmMigrationTransport] ] = ( - type(self).get_transport_class(transport) + VmMigrationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VmMigrationTransport], transport) ) diff --git a/packages/google-cloud-vm-migration/samples/generated_samples/snippet_metadata_google.cloud.vmmigration.v1.json b/packages/google-cloud-vm-migration/samples/generated_samples/snippet_metadata_google.cloud.vmmigration.v1.json index 8c3264de33ce..8437717ca276 100644 --- a/packages/google-cloud-vm-migration/samples/generated_samples/snippet_metadata_google.cloud.vmmigration.v1.json +++ b/packages/google-cloud-vm-migration/samples/generated_samples/snippet_metadata_google.cloud.vmmigration.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vm-migration", - "version": "1.8.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py b/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py index 6dbc640d0c81..971022295d0f 100644 --- a/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py +++ b/packages/google-cloud-vm-migration/tests/unit/gapic/vmmigration_v1/test_vm_migration.py @@ -1273,22 +1273,23 @@ async def test_list_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1834,22 +1835,23 @@ async def test_get_source_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_source - ] = mock_object + ] = mock_rpc request = {} await client.get_source(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2144,8 +2146,9 @@ def test_create_source_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_source(request) @@ -2199,26 +2202,28 @@ async def test_create_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_source - ] = mock_object + ] = mock_rpc request = {} await client.create_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2540,8 +2545,9 @@ def test_update_source_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_source(request) @@ -2595,26 +2601,28 @@ async def test_update_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_source - ] = mock_object + ] = mock_rpc request = {} await client.update_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2928,8 +2936,9 @@ def test_delete_source_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_source(request) @@ -2983,26 +2992,28 @@ async def test_delete_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_source - ] = mock_object + ] = mock_rpc request = {} await client.delete_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3343,22 +3354,23 @@ async def test_fetch_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_inventory - ] = mock_object + ] = mock_rpc request = {} await client.fetch_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3726,22 +3738,23 @@ async def test_list_utilization_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_utilization_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_utilization_reports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_utilization_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4332,22 +4345,23 @@ async def test_get_utilization_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_utilization_report - ] = mock_object + ] = mock_rpc request = {} await client.get_utilization_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_utilization_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4674,8 +4688,9 @@ def test_create_utilization_report_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_utilization_report(request) @@ -4731,26 +4746,28 @@ async def test_create_utilization_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_utilization_report - ] = mock_object + ] = mock_rpc request = {} await client.create_utilization_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_utilization_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5084,8 +5101,9 @@ def test_delete_utilization_report_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_utilization_report(request) @@ -5141,26 +5159,28 @@ async def test_delete_utilization_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_utilization_report - ] = mock_object + ] = mock_rpc request = {} await client.delete_utilization_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_utilization_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5539,22 +5559,23 @@ async def test_list_datacenter_connectors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datacenter_connectors - ] = mock_object + ] = mock_rpc request = {} await client.list_datacenter_connectors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datacenter_connectors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6157,22 +6178,23 @@ async def test_get_datacenter_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_datacenter_connector - ] = mock_object + ] = mock_rpc request = {} await client.get_datacenter_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_datacenter_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6508,8 +6530,9 @@ def test_create_datacenter_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_datacenter_connector(request) @@ -6565,26 +6588,28 @@ async def test_create_datacenter_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_datacenter_connector - ] = mock_object + ] = mock_rpc request = {} await client.create_datacenter_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_datacenter_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6930,8 +6955,9 @@ def test_delete_datacenter_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_datacenter_connector(request) @@ -6987,26 +7013,28 @@ async def test_delete_datacenter_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_datacenter_connector - ] = mock_object + ] = mock_rpc request = {} await client.delete_datacenter_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_datacenter_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7317,8 +7345,9 @@ def test_upgrade_appliance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_appliance(request) @@ -7374,26 +7403,28 @@ async def test_upgrade_appliance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upgrade_appliance - ] = mock_object + ] = mock_rpc request = {} await client.upgrade_appliance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_appliance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7621,8 +7652,9 @@ def test_create_migrating_vm_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_migrating_vm(request) @@ -7678,26 +7710,28 @@ async def test_create_migrating_vm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_migrating_vm - ] = mock_object + ] = mock_rpc request = {} await client.create_migrating_vm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_migrating_vm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8118,22 +8152,23 @@ async def test_list_migrating_vms_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migrating_vms - ] = mock_object + ] = mock_rpc request = {} await client.list_migrating_vms(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migrating_vms(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8713,22 +8748,23 @@ async def test_get_migrating_vm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migrating_vm - ] = mock_object + ] = mock_rpc request = {} await client.get_migrating_vm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migrating_vm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9041,8 +9077,9 @@ def test_update_migrating_vm_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_migrating_vm(request) @@ -9098,26 +9135,28 @@ async def test_update_migrating_vm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_migrating_vm - ] = mock_object + ] = mock_rpc request = {} await client.update_migrating_vm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_migrating_vm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9461,8 +9500,9 @@ def test_delete_migrating_vm_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_migrating_vm(request) @@ -9518,26 +9558,28 @@ async def test_delete_migrating_vm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_migrating_vm - ] = mock_object + ] = mock_rpc request = {} await client.delete_migrating_vm(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_migrating_vm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9837,8 +9879,9 @@ def test_start_migration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_migration(request) @@ -9892,26 +9935,28 @@ async def test_start_migration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_migration - ] = mock_object + ] = mock_rpc request = {} await client.start_migration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_migration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10203,8 +10248,9 @@ def test_resume_migration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resume_migration(request) @@ -10258,26 +10304,28 @@ async def test_resume_migration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_migration - ] = mock_object + ] = mock_rpc request = {} await client.resume_migration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resume_migration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10485,8 +10533,9 @@ def test_pause_migration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.pause_migration(request) @@ -10540,26 +10589,28 @@ async def test_pause_migration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_migration - ] = mock_object + ] = mock_rpc request = {} await client.pause_migration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.pause_migration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10777,8 +10828,9 @@ def test_finalize_migration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.finalize_migration(request) @@ -10834,26 +10886,28 @@ async def test_finalize_migration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.finalize_migration - ] = mock_object + ] = mock_rpc request = {} await client.finalize_migration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.finalize_migration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11159,8 +11213,9 @@ def test_create_clone_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_clone_job(request) @@ -11214,26 +11269,28 @@ async def test_create_clone_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_clone_job - ] = mock_object + ] = mock_rpc request = {} await client.create_clone_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_clone_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11569,8 +11626,9 @@ def test_cancel_clone_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.cancel_clone_job(request) @@ -11624,26 +11682,28 @@ async def test_cancel_clone_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_clone_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_clone_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.cancel_clone_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11998,22 +12058,23 @@ async def test_list_clone_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clone_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_clone_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clone_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12561,22 +12622,23 @@ async def test_get_clone_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_clone_job - ] = mock_object + ] = mock_rpc request = {} await client.get_clone_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_clone_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12885,8 +12947,9 @@ def test_create_cutover_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cutover_job(request) @@ -12942,26 +13005,28 @@ async def test_create_cutover_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cutover_job - ] = mock_object + ] = mock_rpc request = {} await client.create_cutover_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cutover_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13315,8 +13380,9 @@ def test_cancel_cutover_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.cancel_cutover_job(request) @@ -13372,26 +13438,28 @@ async def test_cancel_cutover_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_cutover_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_cutover_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.cancel_cutover_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13766,22 +13834,23 @@ async def test_list_cutover_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_cutover_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_cutover_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_cutover_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14353,22 +14422,23 @@ async def test_get_cutover_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cutover_job - ] = mock_object + ] = mock_rpc request = {} await client.get_cutover_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cutover_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14732,22 +14802,23 @@ async def test_list_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15296,22 +15367,23 @@ async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_group - ] = mock_object + ] = mock_rpc request = {} await client.get_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15608,8 +15680,9 @@ def test_create_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_group(request) @@ -15663,26 +15736,28 @@ async def test_create_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_group - ] = mock_object + ] = mock_rpc request = {} await client.create_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15992,8 +16067,9 @@ def test_update_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_group(request) @@ -16047,26 +16123,28 @@ async def test_update_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_group - ] = mock_object + ] = mock_rpc request = {} await client.update_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16368,8 +16446,9 @@ def test_delete_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_group(request) @@ -16423,26 +16502,28 @@ async def test_delete_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16744,8 +16825,9 @@ def test_add_group_migration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.add_group_migration(request) @@ -16801,26 +16883,28 @@ async def test_add_group_migration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.add_group_migration - ] = mock_object + ] = mock_rpc request = {} await client.add_group_migration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.add_group_migration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17133,8 +17217,9 @@ def test_remove_group_migration_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.remove_group_migration(request) @@ -17190,26 +17275,28 @@ async def test_remove_group_migration_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_group_migration - ] = mock_object + ] = mock_rpc request = {} await client.remove_group_migration(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.remove_group_migration(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17587,22 +17674,23 @@ async def test_list_target_projects_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_target_projects - ] = mock_object + ] = mock_rpc request = {} await client.list_target_projects(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_target_projects(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18183,22 +18271,23 @@ async def test_get_target_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_target_project - ] = mock_object + ] = mock_rpc request = {} await client.get_target_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_target_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18520,8 +18609,9 @@ def test_create_target_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_target_project(request) @@ -18577,26 +18667,28 @@ async def test_create_target_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_target_project - ] = mock_object + ] = mock_rpc request = {} await client.create_target_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_target_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18927,8 +19019,9 @@ def test_update_target_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_target_project(request) @@ -18984,26 +19077,28 @@ async def test_update_target_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_target_project - ] = mock_object + ] = mock_rpc request = {} await client.update_target_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_target_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19326,8 +19421,9 @@ def test_delete_target_project_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_target_project(request) @@ -19383,26 +19479,28 @@ async def test_delete_target_project_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_target_project - ] = mock_object + ] = mock_rpc request = {} await client.delete_target_project(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_target_project(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19780,22 +19878,23 @@ async def test_list_replication_cycles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_replication_cycles - ] = mock_object + ] = mock_rpc request = {} await client.list_replication_cycles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_replication_cycles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20381,22 +20480,23 @@ async def test_get_replication_cycle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_replication_cycle - ] = mock_object + ] = mock_rpc request = {} await client.get_replication_cycle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_replication_cycle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py index de27578dd493..558c8aab67c5 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py index de27578dd493..558c8aab67c5 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py index e66efe554f50..4ba41b550a1d 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -253,9 +252,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VmwareEngineClient).get_transport_class, type(VmwareEngineClient) - ) + get_transport_class = VmwareEngineClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py index a41bfdc75c44..dc8b412ddd45 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py @@ -1030,7 +1030,7 @@ def __init__( transport_init: Union[ Type[VmwareEngineTransport], Callable[..., VmwareEngineTransport] ] = ( - type(self).get_transport_class(transport) + VmwareEngineClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VmwareEngineTransport], transport) ) diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json b/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json index 0e6ba909e54d..fba928d86f26 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vmwareengine", - "version": "1.5.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py index 5442fb254f31..e62a85487875 100644 --- a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py +++ b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py @@ -1297,22 +1297,23 @@ async def test_list_private_clouds_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_clouds - ] = mock_object + ] = mock_rpc request = {} await client.list_private_clouds(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_clouds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1899,22 +1900,23 @@ async def test_get_private_cloud_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_private_cloud - ] = mock_object + ] = mock_rpc request = {} await client.get_private_cloud(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_private_cloud(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2239,8 +2241,9 @@ def test_create_private_cloud_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_private_cloud(request) @@ -2296,26 +2299,28 @@ async def test_create_private_cloud_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_private_cloud - ] = mock_object + ] = mock_rpc request = {} await client.create_private_cloud(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_private_cloud(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2645,8 +2650,9 @@ def test_update_private_cloud_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_private_cloud(request) @@ -2702,26 +2708,28 @@ async def test_update_private_cloud_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_private_cloud - ] = mock_object + ] = mock_rpc request = {} await client.update_private_cloud(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_private_cloud(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3043,8 +3051,9 @@ def test_delete_private_cloud_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_private_cloud(request) @@ -3100,26 +3109,28 @@ async def test_delete_private_cloud_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_private_cloud - ] = mock_object + ] = mock_rpc request = {} await client.delete_private_cloud(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_private_cloud(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3432,8 +3443,9 @@ def test_undelete_private_cloud_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_private_cloud(request) @@ -3489,26 +3501,28 @@ async def test_undelete_private_cloud_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_private_cloud - ] = mock_object + ] = mock_rpc request = {} await client.undelete_private_cloud(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_private_cloud(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3874,22 +3888,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4443,22 +4458,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4761,8 +4777,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -4816,26 +4833,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5145,8 +5164,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -5200,26 +5220,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5521,8 +5543,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -5576,26 +5599,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5941,22 +5966,23 @@ async def test_list_nodes_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_nodes - ] = mock_object + ] = mock_rpc request = {} await client.list_nodes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_nodes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6515,22 +6541,23 @@ async def test_get_node_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node - ] = mock_object + ] = mock_rpc request = {} await client.get_node(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6913,22 +6940,23 @@ async def test_list_external_addresses_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_external_addresses - ] = mock_object + ] = mock_rpc request = {} await client.list_external_addresses(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_external_addresses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7511,22 +7539,23 @@ async def test_fetch_network_policy_external_addresses_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_network_policy_external_addresses - ] = mock_object + ] = mock_rpc request = {} await client.fetch_network_policy_external_addresses(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_network_policy_external_addresses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8121,22 +8150,23 @@ async def test_get_external_address_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_external_address - ] = mock_object + ] = mock_rpc request = {} await client.get_external_address(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_external_address(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8464,8 +8494,9 @@ def test_create_external_address_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_external_address(request) @@ -8521,26 +8552,28 @@ async def test_create_external_address_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_external_address - ] = mock_object + ] = mock_rpc request = {} await client.create_external_address(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_external_address(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8872,8 +8905,9 @@ def test_update_external_address_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_external_address(request) @@ -8929,26 +8963,28 @@ async def test_update_external_address_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_external_address - ] = mock_object + ] = mock_rpc request = {} await client.update_external_address(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_external_address(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9272,8 +9308,9 @@ def test_delete_external_address_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_external_address(request) @@ -9329,26 +9366,28 @@ async def test_delete_external_address_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_external_address - ] = mock_object + ] = mock_rpc request = {} await client.delete_external_address(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_external_address(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9710,22 +9749,23 @@ async def test_list_subnets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subnets - ] = mock_object + ] = mock_rpc request = {} await client.list_subnets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subnets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10283,22 +10323,23 @@ async def test_get_subnet_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subnet - ] = mock_object + ] = mock_rpc request = {} await client.get_subnet(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subnet(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10597,8 +10638,9 @@ def test_update_subnet_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_subnet(request) @@ -10652,26 +10694,28 @@ async def test_update_subnet_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_subnet - ] = mock_object + ] = mock_rpc request = {} await client.update_subnet(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_subnet(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11049,22 +11093,23 @@ async def test_list_external_access_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_external_access_rules - ] = mock_object + ] = mock_rpc request = {} await client.list_external_access_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_external_access_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11671,22 +11716,23 @@ async def test_get_external_access_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_external_access_rule - ] = mock_object + ] = mock_rpc request = {} await client.get_external_access_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_external_access_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12021,8 +12067,9 @@ def test_create_external_access_rule_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_external_access_rule(request) @@ -12078,26 +12125,28 @@ async def test_create_external_access_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_external_access_rule - ] = mock_object + ] = mock_rpc request = {} await client.create_external_access_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_external_access_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12437,8 +12486,9 @@ def test_update_external_access_rule_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_external_access_rule(request) @@ -12494,26 +12544,28 @@ async def test_update_external_access_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_external_access_rule - ] = mock_object + ] = mock_rpc request = {} await client.update_external_access_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_external_access_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12845,8 +12897,9 @@ def test_delete_external_access_rule_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_external_access_rule(request) @@ -12902,26 +12955,28 @@ async def test_delete_external_access_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_external_access_rule - ] = mock_object + ] = mock_rpc request = {} await client.delete_external_access_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_external_access_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13299,22 +13354,23 @@ async def test_list_logging_servers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_logging_servers - ] = mock_object + ] = mock_rpc request = {} await client.list_logging_servers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_logging_servers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13906,22 +13962,23 @@ async def test_get_logging_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_logging_server - ] = mock_object + ] = mock_rpc request = {} await client.get_logging_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_logging_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14249,8 +14306,9 @@ def test_create_logging_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_logging_server(request) @@ -14306,26 +14364,28 @@ async def test_create_logging_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_logging_server - ] = mock_object + ] = mock_rpc request = {} await client.create_logging_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_logging_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14657,8 +14717,9 @@ def test_update_logging_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_logging_server(request) @@ -14714,26 +14775,28 @@ async def test_update_logging_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_logging_server - ] = mock_object + ] = mock_rpc request = {} await client.update_logging_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_logging_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15057,8 +15120,9 @@ def test_delete_logging_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_logging_server(request) @@ -15114,26 +15178,28 @@ async def test_delete_logging_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_logging_server - ] = mock_object + ] = mock_rpc request = {} await client.delete_logging_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_logging_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15497,22 +15563,23 @@ async def test_list_node_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_node_types - ] = mock_object + ] = mock_rpc request = {} await client.list_node_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_node_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16093,22 +16160,23 @@ async def test_get_node_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_type - ] = mock_object + ] = mock_rpc request = {} await client.get_node_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16496,22 +16564,23 @@ async def test_show_nsx_credentials_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.show_nsx_credentials - ] = mock_object + ] = mock_rpc request = {} await client.show_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.show_nsx_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16890,22 +16959,23 @@ async def test_show_vcenter_credentials_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.show_vcenter_credentials - ] = mock_object + ] = mock_rpc request = {} await client.show_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.show_vcenter_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17224,8 +17294,9 @@ def test_reset_nsx_credentials_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_nsx_credentials(request) @@ -17281,26 +17352,28 @@ async def test_reset_nsx_credentials_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_nsx_credentials - ] = mock_object + ] = mock_rpc request = {} await client.reset_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_nsx_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17616,8 +17689,9 @@ def test_reset_vcenter_credentials_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_vcenter_credentials(request) @@ -17673,26 +17747,28 @@ async def test_reset_vcenter_credentials_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_vcenter_credentials - ] = mock_object + ] = mock_rpc request = {} await client.reset_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
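The comment rewritten throughout these hunks describes a second layer of caching that applies only to long-running-operation methods: the first RPC call builds client._transport.operations_client (which is when wrapper_fn runs), and later calls reuse that cached instance, which is why the tests expect wrapper_fn.call_count == 0 after the reset. The toy sketch below illustrates that lazily cached property, assuming the updated comment describes the behaviour accurately; _FakeTransport and wrapper_fn here are illustrative names, not google.api_core types.

from unittest import mock


class _FakeTransport:
    def __init__(self, wrapper_fn):
        self._wrapper_fn = wrapper_fn
        self._operations_client = None

    @property
    def operations_client(self):
        # Built once, on first access; the wrapper factory only runs here.
        if self._operations_client is None:
            self._operations_client = self._wrapper_fn()
        return self._operations_client


wrapper_fn = mock.Mock(return_value=object())
transport = _FakeTransport(wrapper_fn)

transport.operations_client   # first LRO call: builds and caches the client
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
transport.operations_client   # subsequent call: served from the cache
assert wrapper_fn.call_count == 0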
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_vcenter_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18061,22 +18137,23 @@ async def test_get_dns_forwarding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dns_forwarding - ] = mock_object + ] = mock_rpc request = {} await client.get_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dns_forwarding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18390,8 +18467,9 @@ def test_update_dns_forwarding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_dns_forwarding(request) @@ -18447,26 +18525,28 @@ async def test_update_dns_forwarding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dns_forwarding - ] = mock_object + ] = mock_rpc request = {} await client.update_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_dns_forwarding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18887,22 +18967,23 @@ async def test_get_network_peering_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_network_peering - ] = mock_object + ] = mock_rpc request = {} await client.get_network_peering(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_network_peering(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19312,22 +19393,23 @@ async def test_list_network_peerings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_network_peerings - ] = mock_object + ] = mock_rpc request = {} await client.list_network_peerings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_network_peerings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19854,8 +19936,9 @@ def test_create_network_peering_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_network_peering(request) @@ -19911,26 +19994,28 @@ async def test_create_network_peering_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_network_peering - ] = mock_object + ] = mock_rpc request = {} await client.create_network_peering(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_network_peering(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20264,8 +20349,9 @@ def test_delete_network_peering_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_network_peering(request) @@ -20321,26 +20407,28 @@ async def test_delete_network_peering_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_network_peering - ] = mock_object + ] = mock_rpc request = {} await client.delete_network_peering(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_network_peering(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20652,8 +20740,9 @@ def test_update_network_peering_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_network_peering(request) @@ -20709,26 +20798,28 @@ async def test_update_network_peering_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_network_peering - ] = mock_object + ] = mock_rpc request = {} await client.update_network_peering(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_network_peering(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21111,22 +21202,23 @@ async def test_list_peering_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_peering_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_peering_routes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_peering_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21648,8 +21740,9 @@ def test_create_hcx_activation_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_hcx_activation_key(request) @@ -21705,26 +21798,28 @@ async def test_create_hcx_activation_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_hcx_activation_key - ] = mock_object + ] = mock_rpc request = {} await client.create_hcx_activation_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_hcx_activation_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22127,22 +22222,23 @@ async def test_list_hcx_activation_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_hcx_activation_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_hcx_activation_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_hcx_activation_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22734,22 +22830,23 @@ async def test_get_hcx_activation_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_hcx_activation_key - ] = mock_object + ] = mock_rpc request = {} await client.get_hcx_activation_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_hcx_activation_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23145,22 +23242,23 @@ async def test_get_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_network_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23554,22 +23652,23 @@ async def test_list_network_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_network_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_network_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_network_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24094,8 +24193,9 @@ def test_create_network_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_network_policy(request) @@ -24151,26 +24251,28 @@ async def test_create_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_network_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24502,8 +24604,9 @@ def test_update_network_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_network_policy(request) @@ -24559,26 +24662,28 @@ async def test_update_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_network_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -24902,8 +25007,9 @@ def test_delete_network_policy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_network_policy(request) @@ -24959,26 +25065,28 @@ async def test_delete_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_network_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25357,22 +25465,23 @@ async def test_list_management_dns_zone_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_management_dns_zone_bindings - ] = mock_object + ] = mock_rpc request = {} await client.list_management_dns_zone_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_management_dns_zone_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -25969,22 +26078,23 @@ async def test_get_management_dns_zone_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_management_dns_zone_binding - ] = mock_object + ] = mock_rpc request = {} await client.get_management_dns_zone_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_management_dns_zone_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -26311,8 +26421,9 @@ def test_create_management_dns_zone_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_management_dns_zone_binding(request) @@ -26368,26 +26479,28 @@ async def test_create_management_dns_zone_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_management_dns_zone_binding - ] = mock_object + ] = mock_rpc request = {} await client.create_management_dns_zone_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_management_dns_zone_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -26727,8 +26840,9 @@ def test_update_management_dns_zone_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_management_dns_zone_binding(request) @@ -26784,26 +26898,28 @@ async def test_update_management_dns_zone_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_management_dns_zone_binding - ] = mock_object + ] = mock_rpc request = {} await client.update_management_dns_zone_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_management_dns_zone_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27135,8 +27251,9 @@ def test_delete_management_dns_zone_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_management_dns_zone_binding(request) @@ -27192,26 +27309,28 @@ async def test_delete_management_dns_zone_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_management_dns_zone_binding - ] = mock_object + ] = mock_rpc request = {} await client.delete_management_dns_zone_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_management_dns_zone_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27525,8 +27644,9 @@ def test_repair_management_dns_zone_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.repair_management_dns_zone_binding(request) @@ -27582,26 +27702,28 @@ async def test_repair_management_dns_zone_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.repair_management_dns_zone_binding - ] = mock_object + ] = mock_rpc request = {} await client.repair_management_dns_zone_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.repair_management_dns_zone_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -27917,8 +28039,9 @@ def test_create_vmware_engine_network_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_vmware_engine_network(request) @@ -27974,26 +28097,28 @@ async def test_create_vmware_engine_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_vmware_engine_network - ] = mock_object + ] = mock_rpc request = {} await client.create_vmware_engine_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_vmware_engine_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -28333,8 +28458,9 @@ def test_update_vmware_engine_network_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_vmware_engine_network(request) @@ -28390,26 +28516,28 @@ async def test_update_vmware_engine_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_vmware_engine_network - ] = mock_object + ] = mock_rpc request = {} await client.update_vmware_engine_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_vmware_engine_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -28743,8 +28871,9 @@ def test_delete_vmware_engine_network_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_vmware_engine_network(request) @@ -28800,26 +28929,28 @@ async def test_delete_vmware_engine_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_vmware_engine_network - ] = mock_object + ] = mock_rpc request = {} await client.delete_vmware_engine_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_vmware_engine_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -29204,22 +29335,23 @@ async def test_get_vmware_engine_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vmware_engine_network - ] = mock_object + ] = mock_rpc request = {} await client.get_vmware_engine_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vmware_engine_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -29611,22 +29743,23 @@ async def test_list_vmware_engine_networks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vmware_engine_networks - ] = mock_object + ] = mock_rpc request = {} await client.list_vmware_engine_networks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vmware_engine_networks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -30155,8 +30288,9 @@ def test_create_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_private_connection(request) @@ -30212,26 +30346,28 @@ async def test_create_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -30671,22 +30807,23 @@ async def test_get_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -31100,22 +31237,23 @@ async def test_list_private_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_private_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -31640,8 +31778,9 @@ def test_update_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_private_connection(request) @@ -31697,26 +31836,28 @@ async def test_update_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -32048,8 +32189,9 @@ def test_delete_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_private_connection(request) @@ -32105,26 +32247,28 @@ async def test_delete_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -32496,22 +32640,23 @@ async def test_list_private_connection_peering_routes_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_connection_peering_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_private_connection_peering_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_connection_peering_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -33034,8 +33179,9 @@ def test_grant_dns_bind_permission_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.grant_dns_bind_permission(request) @@ -33091,26 +33237,28 @@ async def test_grant_dns_bind_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.grant_dns_bind_permission - ] = mock_object + ] = mock_rpc request = {} await client.grant_dns_bind_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.grant_dns_bind_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -33490,22 +33638,23 @@ async def test_get_dns_bind_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dns_bind_permission - ] = mock_object + ] = mock_rpc request = {} await client.get_dns_bind_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dns_bind_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -33822,8 +33971,9 @@ def test_revoke_dns_bind_permission_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.revoke_dns_bind_permission(request) @@ -33879,26 +34029,28 @@ async def test_revoke_dns_bind_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revoke_dns_bind_permission - ] = mock_object + ] = mock_rpc request = {} await client.revoke_dns_bind_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.revoke_dns_bind_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-vpc-access/google/cloud/vpcaccess/gapic_version.py b/packages/google-cloud-vpc-access/google/cloud/vpcaccess/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-vpc-access/google/cloud/vpcaccess/gapic_version.py +++ b/packages/google-cloud-vpc-access/google/cloud/vpcaccess/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/gapic_version.py b/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/gapic_version.py +++ b/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/async_client.py b/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/async_client.py index 7fdc268d520a..8a0d65b2b6b4 100644 --- a/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/async_client.py +++ b/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VpcAccessServiceClient).get_transport_class, type(VpcAccessServiceClient) - ) + get_transport_class = VpcAccessServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/client.py b/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/client.py index f504c116966e..014b88f68d38 100644 --- a/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/client.py +++ b/packages/google-cloud-vpc-access/google/cloud/vpcaccess_v1/services/vpc_access_service/client.py @@ -672,7 +672,7 @@ def __init__( Type[VpcAccessServiceTransport], Callable[..., VpcAccessServiceTransport], ] = ( - type(self).get_transport_class(transport) + VpcAccessServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VpcAccessServiceTransport], transport) ) diff --git a/packages/google-cloud-vpc-access/samples/generated_samples/snippet_metadata_google.cloud.vpcaccess.v1.json b/packages/google-cloud-vpc-access/samples/generated_samples/snippet_metadata_google.cloud.vpcaccess.v1.json index 121344a36c7a..16be63e96c27 100644 --- a/packages/google-cloud-vpc-access/samples/generated_samples/snippet_metadata_google.cloud.vpcaccess.v1.json +++ b/packages/google-cloud-vpc-access/samples/generated_samples/snippet_metadata_google.cloud.vpcaccess.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vpc-access", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py b/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py index 820e93594489..37926ebbf88c 100644 --- a/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py +++ b/packages/google-cloud-vpc-access/tests/unit/gapic/vpcaccess_v1/test_vpc_access_service.py @@ -1271,8 +1271,9 @@ def test_create_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connector(request) @@ -1326,26 +1327,28 @@ async def test_create_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connector - ] = mock_object + ] = mock_rpc request = {} await client.create_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
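The vpcaccess_v1 async_client.py hunk above replaces the functools.partial spelling of get_transport_class with plain attribute access on the sync client class, and drops the now-unused functools import; client.py likewise switches from type(self).get_transport_class to naming the class directly. As the old spelling suggests, get_transport_class is defined on the client's metaclass, which is what makes the two forms interchangeable. A small, runnable illustration with hypothetical names (not the real VpcAccessServiceClient classes):

import functools


class _ClientMeta(type):
    _transport_registry = {"grpc": "GrpcTransport", "rest": "RestTransport"}

    def get_transport_class(cls, label=None):
        # Resolve a transport by label, defaulting to the first registered one.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class _Client(metaclass=_ClientMeta):
    pass


# Old spelling: bind the metaclass explicitly through functools.partial.
old = functools.partial(type(_Client).get_transport_class, type(_Client))
# New spelling: attribute access on the class already yields a bound method,
# because the method lives on the metaclass.
new = _Client.get_transport_class

assert old("rest") == new("rest") == "RestTransport"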
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1738,22 +1741,23 @@ async def test_get_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connector - ] = mock_object + ] = mock_rpc request = {} await client.get_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2122,22 +2126,23 @@ async def test_list_connectors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connectors - ] = mock_object + ] = mock_rpc request = {} await client.list_connectors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connectors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2626,8 +2631,9 @@ def test_delete_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connector(request) @@ -2681,26 +2687,28 @@ async def test_delete_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connector - ] = mock_object + ] = mock_rpc request = {} await client.delete_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-webrisk/google/cloud/webrisk/gapic_version.py b/packages/google-cloud-webrisk/google/cloud/webrisk/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-webrisk/google/cloud/webrisk/gapic_version.py +++ b/packages/google-cloud-webrisk/google/cloud/webrisk/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-webrisk/google/cloud/webrisk_v1/gapic_version.py b/packages/google-cloud-webrisk/google/cloud/webrisk_v1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-webrisk/google/cloud/webrisk_v1/gapic_version.py +++ b/packages/google-cloud-webrisk/google/cloud/webrisk_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/async_client.py b/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/async_client.py index 2d8b4c0c2049..cd964df576d9 100644 --- a/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/async_client.py +++ b/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WebRiskServiceClient).get_transport_class, type(WebRiskServiceClient) - ) + get_transport_class = WebRiskServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/client.py b/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/client.py index 26d80579981f..d8ca6cb6ae4b 100644 --- a/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/client.py +++ b/packages/google-cloud-webrisk/google/cloud/webrisk_v1/services/web_risk_service/client.py @@ -644,7 +644,7 @@ def __init__( transport_init: Union[ Type[WebRiskServiceTransport], Callable[..., WebRiskServiceTransport] ] = ( - type(self).get_transport_class(transport) + WebRiskServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WebRiskServiceTransport], transport) ) diff --git a/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/gapic_version.py b/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/gapic_version.py +++ b/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/async_client.py b/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/async_client.py index ac9c00a32a8c..e0e80324b79b 100644 --- a/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/async_client.py +++ b/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
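Aside: the async_client.py and client.py hunks above repeat one refactor across every package in this patch: the functools.partial(type(Client).get_transport_class, type(Client)) construction, and the type(self).get_transport_class(transport) call in __init__, are replaced by a plain reference to the sync client's get_transport_class, which also lets the now-unused functools import go. Both spellings end up in the same registry lookup because the generated clients define get_transport_class on a client metaclass. A toy, self-contained sketch of the equivalence; ClientMeta and SyncClient are stand-ins, not the generated classes:

    import functools
    from collections import OrderedDict


    class ClientMeta(type):
        # Stand-in for the generated *ClientMeta transport registry.
        _transport_registry = OrderedDict(grpc="GrpcTransport", rest="RestTransport")

        def get_transport_class(cls, label=None):
            # Return the registered transport, defaulting to the first entry.
            if label:
                return cls._transport_registry[label]
            return next(iter(cls._transport_registry.values()))


    class SyncClient(metaclass=ClientMeta):
        pass


    # Old pattern removed by this patch: bind the metaclass function by hand.
    old_get = functools.partial(type(SyncClient).get_transport_class, type(SyncClient))

    # New pattern: plain attribute access on the class already yields a bound method.
    new_get = SyncClient.get_transport_class

    assert old_get("grpc") == new_get("grpc") == "GrpcTransport"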
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,10 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WebRiskServiceV1Beta1Client).get_transport_class, - type(WebRiskServiceV1Beta1Client), - ) + get_transport_class = WebRiskServiceV1Beta1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/client.py b/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/client.py index b030426f49a4..01806605c97d 100644 --- a/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/client.py +++ b/packages/google-cloud-webrisk/google/cloud/webrisk_v1beta1/services/web_risk_service_v1_beta1/client.py @@ -648,7 +648,7 @@ def __init__( Type[WebRiskServiceV1Beta1Transport], Callable[..., WebRiskServiceV1Beta1Transport], ] = ( - type(self).get_transport_class(transport) + WebRiskServiceV1Beta1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WebRiskServiceV1Beta1Transport], transport) ) diff --git a/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1.json b/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1.json index 3412a9d06d8c..1a29be137849 100644 --- a/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1.json +++ b/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-webrisk", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1beta1.json b/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1beta1.json index 24dc18da8063..de1e97aff4a5 100644 --- a/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1beta1.json +++ b/packages/google-cloud-webrisk/samples/generated_samples/snippet_metadata_google.cloud.webrisk.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-webrisk", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1/test_web_risk_service.py b/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1/test_web_risk_service.py index c9a9884a2ed5..6e9bd82226bf 100644 --- a/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1/test_web_risk_service.py +++ b/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1/test_web_risk_service.py @@ -1310,22 +1310,23 @@ async def test_compute_threat_list_diff_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_threat_list_diff - ] = mock_object + ] = mock_rpc request = {} await client.compute_threat_list_diff(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_threat_list_diff(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1651,22 +1652,23 @@ async def test_search_uris_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_uris - ] = mock_object + ] = mock_rpc request = {} await client.search_uris(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_uris(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1952,22 +1954,23 @@ async def test_search_hashes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_hashes - ] = mock_object + ] = mock_rpc request = {} await client.search_hashes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_hashes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2275,22 +2278,23 @@ async def test_create_submission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_submission - ] = mock_object + ] = mock_rpc request = {} await client.create_submission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_submission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2601,8 +2605,9 @@ def test_submit_uri_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.submit_uri(request) @@ -2654,26 +2659,28 @@ async def test_submit_uri_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_uri - ] = mock_object + ] = mock_rpc request = {} await client.submit_uri(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.submit_uri(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1beta1/test_web_risk_service_v1_beta1.py b/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1beta1/test_web_risk_service_v1_beta1.py index d46c70eca352..29d04ceedb7c 100644 --- a/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1beta1/test_web_risk_service_v1_beta1.py +++ b/packages/google-cloud-webrisk/tests/unit/gapic/webrisk_v1beta1/test_web_risk_service_v1_beta1.py @@ -1368,22 +1368,23 @@ async def test_compute_threat_list_diff_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_threat_list_diff - ] = mock_object + ] = mock_rpc request = {} await client.compute_threat_list_diff(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_threat_list_diff(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1709,22 +1710,23 @@ async def test_search_uris_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_uris - ] = mock_object + ] = mock_rpc request = {} await client.search_uris(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_uris(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2010,22 +2012,23 @@ async def test_search_hashes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_hashes - ] = mock_object + ] = mock_rpc request = {} await client.search_hashes(request) # Establish that the underlying gRPC stub method was called. 
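Aside: the test hunks above, and their many repeats below, apply the same two mechanical tweaks everywhere: the injected mock.AsyncMock() is renamed from mock_object to mock_rpc and given an explicit mock_rpc.return_value = mock.Mock(), and the long-running-operation comment is reworded to say that wrapper_fn builds a cached client._transport.operations_client on the first call. A minimal, self-contained sketch of the assertion pattern these tests exercise, using stand-in classes rather than the generated GAPIC clients:

    import asyncio
    from unittest import mock


    class FakeTransport:
        def __init__(self):
            # Generated transports cache wrapped methods in a dict; a plain
            # string key is enough for this sketch.
            self._wrapped_methods = {"create_connector": None}


    class FakeAsyncClient:
        def __init__(self):
            self._transport = FakeTransport()

        async def create_connector(self, request):
            # Dispatch through the cached wrapper, as the generated clients do.
            return await self._transport._wrapped_methods["create_connector"](request)


    async def main():
        client = FakeAsyncClient()

        # Replace the cached wrapped function with a mock, mirroring the tests.
        mock_rpc = mock.AsyncMock()
        # Mirror the patch: pin the awaited result to a plain Mock.
        mock_rpc.return_value = mock.Mock()
        client._transport._wrapped_methods["create_connector"] = mock_rpc

        request = {}
        await client.create_connector(request)
        assert mock_rpc.call_count == 1

        # A second call goes through the same cached wrapper.
        await client.create_connector(request)
        assert mock_rpc.call_count == 2


    asyncio.run(main())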
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_hashes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner/gapic_version.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner/gapic_version.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/gapic_version.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/gapic_version.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/async_client.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/async_client.py index bca317e9b862..c65fa657162a 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/async_client.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WebSecurityScannerClient).get_transport_class, - type(WebSecurityScannerClient), - ) + get_transport_class = WebSecurityScannerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/client.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/client.py index b872b9a49569..807a26ccba8e 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/client.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1/services/web_security_scanner/client.py @@ -683,7 +683,7 @@ def __init__( Type[WebSecurityScannerTransport], Callable[..., WebSecurityScannerTransport], ] = ( - type(self).get_transport_class(transport) + WebSecurityScannerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WebSecurityScannerTransport], transport) ) diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic_version.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic_version.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/async_client.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/async_client.py index 4e52dc03d341..3e9450a8b77b 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/async_client.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -208,10 +207,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WebSecurityScannerClient).get_transport_class, - type(WebSecurityScannerClient), - ) + get_transport_class = WebSecurityScannerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/client.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/client.py index b3de7b6560ea..db94e624f996 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/client.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/client.py @@ -725,7 +725,7 @@ def __init__( Type[WebSecurityScannerTransport], Callable[..., WebSecurityScannerTransport], ] = ( - type(self).get_transport_class(transport) + WebSecurityScannerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WebSecurityScannerTransport], transport) ) diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic_version.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic_version.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/async_client.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/async_client.py index 9accae8fdcb0..f3b106ac2584 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/async_client.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,10 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WebSecurityScannerClient).get_transport_class, - type(WebSecurityScannerClient), - ) + get_transport_class = WebSecurityScannerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/client.py b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/client.py index 07111adaf3b8..5938a7171379 100644 --- a/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/client.py +++ b/packages/google-cloud-websecurityscanner/google/cloud/websecurityscanner_v1beta/services/web_security_scanner/client.py @@ -730,7 +730,7 @@ def __init__( Type[WebSecurityScannerTransport], Callable[..., WebSecurityScannerTransport], ] = ( - type(self).get_transport_class(transport) + WebSecurityScannerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WebSecurityScannerTransport], transport) ) diff --git a/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1.json b/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1.json index 758bb71da40e..030ea8082797 100644 --- a/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1.json +++ b/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-websecurityscanner", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1alpha.json b/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1alpha.json index 1f1e3a99d7b0..c87e0efb514b 100644 --- a/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1alpha.json +++ b/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-websecurityscanner", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1beta.json b/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1beta.json index 35f65c060e5d..3896b1676525 100644 --- a/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1beta.json +++ b/packages/google-cloud-websecurityscanner/samples/generated_samples/snippet_metadata_google.cloud.websecurityscanner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-websecurityscanner", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py index 3969ec5360b1..187ec4d7f502 100644 --- a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py +++ b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1/test_web_security_scanner.py @@ -1380,22 +1380,23 @@ async def test_create_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.create_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1697,22 +1698,23 @@ async def test_delete_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2012,22 +2014,23 @@ async def test_get_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.get_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2330,22 +2333,23 @@ async def test_list_scan_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_scan_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_scan_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_scan_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2862,22 +2866,23 @@ async def test_update_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.update_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3192,22 +3197,23 @@ async def test_start_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.start_scan_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3503,22 +3509,23 @@ async def test_get_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.get_scan_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3797,22 +3804,23 @@ async def test_list_scan_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_scan_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_scan_runs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_scan_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4292,22 +4300,23 @@ async def test_stop_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.stop_scan_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4597,22 +4606,23 @@ async def test_list_crawled_urls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_crawled_urls - ] = mock_object + ] = mock_rpc request = {} await client.list_crawled_urls(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_crawled_urls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5118,22 +5128,23 @@ async def test_get_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_finding - ] = mock_object + ] = mock_rpc request = {} await client.get_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5422,22 +5433,23 @@ async def test_list_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_findings - ] = mock_object + ] = mock_rpc request = {} await client.list_findings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5907,22 +5919,23 @@ async def test_list_finding_type_stats_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_finding_type_stats - ] = mock_object + ] = mock_rpc request = {} await client.list_finding_type_stats(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_finding_type_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py index 3bf6b982f1c7..6533c69e56f4 100644 --- a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py +++ b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1alpha/test_web_security_scanner.py @@ -1365,22 +1365,23 @@ async def test_create_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.create_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1769,22 +1770,23 @@ async def test_delete_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_scan_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2155,22 +2157,23 @@ async def test_get_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.get_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2546,22 +2549,23 @@ async def test_list_scan_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_scan_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_scan_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_scan_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3151,22 +3155,23 @@ async def test_update_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.update_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3568,22 +3573,23 @@ async def test_start_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.start_scan_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3959,22 +3965,23 @@ async def test_get_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.get_scan_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4333,22 +4340,23 @@ async def test_list_scan_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_scan_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_scan_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_scan_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4910,22 +4918,23 @@ async def test_stop_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.stop_scan_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5295,22 +5304,23 @@ async def test_list_crawled_urls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_crawled_urls - ] = mock_object + ] = mock_rpc request = {} await client.list_crawled_urls(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_crawled_urls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5899,22 +5909,23 @@ async def test_get_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_finding - ] = mock_object + ] = mock_rpc request = {} await client.get_finding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6281,22 +6292,23 @@ async def test_list_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_findings - ] = mock_object + ] = mock_rpc request = {} await client.list_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6858,22 +6870,23 @@ async def test_list_finding_type_stats_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_finding_type_stats - ] = mock_object + ] = mock_rpc request = {} await client.list_finding_type_stats(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_finding_type_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py index 7cb511a9a897..a6bf81b97039 100644 --- a/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py +++ b/packages/google-cloud-websecurityscanner/tests/unit/gapic/websecurityscanner_v1beta/test_web_security_scanner.py @@ -1380,22 +1380,23 @@ async def test_create_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.create_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1791,22 +1792,23 @@ async def test_delete_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2186,22 +2188,23 @@ async def test_get_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.get_scan_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2584,22 +2587,23 @@ async def test_list_scan_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_scan_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_scan_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_scan_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3198,22 +3202,23 @@ async def test_update_scan_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_scan_config - ] = mock_object + ] = mock_rpc request = {} await client.update_scan_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_scan_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3622,22 +3627,23 @@ async def test_start_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.start_scan_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4013,22 +4019,23 @@ async def test_get_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.get_scan_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4387,22 +4394,23 @@ async def test_list_scan_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_scan_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_scan_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_scan_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4964,22 +4972,23 @@ async def test_stop_scan_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_scan_run - ] = mock_object + ] = mock_rpc request = {} await client.stop_scan_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_scan_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5349,22 +5358,23 @@ async def test_list_crawled_urls_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_crawled_urls - ] = mock_object + ] = mock_rpc request = {} await client.list_crawled_urls(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_crawled_urls(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5953,22 +5963,23 @@ async def test_get_finding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_finding - ] = mock_object + ] = mock_rpc request = {} await client.get_finding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_finding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6335,22 +6346,23 @@ async def test_list_findings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_findings - ] = mock_object + ] = mock_rpc request = {} await client.list_findings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_findings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6912,22 +6924,23 @@ async def test_list_finding_type_stats_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_finding_type_stats - ] = mock_object + ] = mock_rpc request = {} await client.list_finding_type_stats(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_finding_type_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-workflows/docs/index.rst b/packages/google-cloud-workflows/docs/index.rst index fbe39e3a5b14..11dc740e1731 100644 --- a/packages/google-cloud-workflows/docs/index.rst +++ b/packages/google-cloud-workflows/docs/index.rst @@ -23,23 +23,6 @@ API Reference workflows_v1beta/types_ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - executions_v1/services_ - executions_v1/types_ - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - executions_v1beta/services_ - executions_v1beta/types_ - - Changelog --------- diff --git a/packages/google-cloud-workflows/google/cloud/workflows/executions/gapic_version.py b/packages/google-cloud-workflows/google/cloud/workflows/executions/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/executions/gapic_version.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/executions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/gapic_version.py b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/gapic_version.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/async_client.py b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/async_client.py index 991dcca8d66d..ece3c3598599 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/async_client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ExecutionsClient).get_transport_class, type(ExecutionsClient) - ) + get_transport_class = ExecutionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/client.py b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/client.py index bba09a9deb02..f8652adb9563 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1/services/executions/client.py @@ -682,7 +682,7 @@ def __init__( transport_init: Union[ Type[ExecutionsTransport], Callable[..., ExecutionsTransport] ] = ( - type(self).get_transport_class(transport) + ExecutionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ExecutionsTransport], transport) ) diff --git a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/gapic_version.py b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/gapic_version.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
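# Aside on the version changes above (a hedged illustration, not the actual
# release tooling): the generated gapic_version.py files are reset to the
# "0.0.0" placeholder (and snippet metadata to "0.1.0"); the
# "{x-release-please-version}" marker lets release automation locate the line
# and stamp the real version at release time. A minimal sketch of that kind of
# placeholder substitution (the regex and the "1.2.3" value are hypothetical):
import re

line = '__version__ = "0.0.0"  # {x-release-please-version}'
stamped = re.sub(r'"\d+\.\d+\.\d+"', '"1.2.3"', line)
assert stamped == '__version__ = "1.2.3"  # {x-release-please-version}'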
# -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/async_client.py b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/async_client.py index f6d91ed2f6e3..67c7af47b23f 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/async_client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ExecutionsClient).get_transport_class, type(ExecutionsClient) - ) + get_transport_class = ExecutionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/client.py b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/client.py index 37263247b785..cf7ff3e5b434 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/executions_v1beta/services/executions/client.py @@ -682,7 +682,7 @@ def __init__( transport_init: Union[ Type[ExecutionsTransport], Callable[..., ExecutionsTransport] ] = ( - type(self).get_transport_class(transport) + ExecutionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ExecutionsTransport], transport) ) diff --git a/packages/google-cloud-workflows/google/cloud/workflows/gapic_version.py b/packages/google-cloud-workflows/google/cloud/workflows/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows/gapic_version.py +++ b/packages/google-cloud-workflows/google/cloud/workflows/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workflows/google/cloud/workflows_v1/gapic_version.py b/packages/google-cloud-workflows/google/cloud/workflows_v1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows_v1/gapic_version.py +++ b/packages/google-cloud-workflows/google/cloud/workflows_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/async_client.py b/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/async_client.py index 618b92e29cd9..5f0a2facd4ac 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/async_client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WorkflowsClient).get_transport_class, type(WorkflowsClient) - ) + get_transport_class = WorkflowsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/client.py b/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/client.py index 029e7be2dd67..15381d6753cf 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows_v1/services/workflows/client.py @@ -690,7 +690,7 @@ def __init__( transport_init: Union[ Type[WorkflowsTransport], Callable[..., WorkflowsTransport] ] = ( - type(self).get_transport_class(transport) + WorkflowsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WorkflowsTransport], transport) ) diff --git a/packages/google-cloud-workflows/google/cloud/workflows_v1beta/gapic_version.py b/packages/google-cloud-workflows/google/cloud/workflows_v1beta/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows_v1beta/gapic_version.py +++ b/packages/google-cloud-workflows/google/cloud/workflows_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/async_client.py b/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/async_client.py index 35964be1ef62..91aab3e657c1 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/async_client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
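# Illustrative aside on the get_transport_class change above, using toy classes
# (WorkflowsClient here is a stand-in with a hypothetical label-to-transport
# mapping, not the generated client): the async clients previously built this
# attribute with functools.partial; the new code simply re-exports the sync
# client's classmethod, which callers use the same way.
class WorkflowsClient:
    _transports = {
        "grpc": "WorkflowsGrpcTransport",
        "grpc_asyncio": "WorkflowsGrpcAsyncIOTransport",
    }

    @classmethod
    def get_transport_class(cls, label=None):
        return cls._transports[label or "grpc"]

class WorkflowsAsyncClient:
    # The attribute is just the sync client's bound classmethod, replacing the
    # removed functools.partial construction.
    get_transport_class = WorkflowsClient.get_transport_class

assert WorkflowsAsyncClient.get_transport_class("grpc_asyncio") == "WorkflowsGrpcAsyncIOTransport"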
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WorkflowsClient).get_transport_class, type(WorkflowsClient) - ) + get_transport_class = WorkflowsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/client.py b/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/client.py index b23ffbe8c922..369d219a4c60 100644 --- a/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/client.py +++ b/packages/google-cloud-workflows/google/cloud/workflows_v1beta/services/workflows/client.py @@ -664,7 +664,7 @@ def __init__( transport_init: Union[ Type[WorkflowsTransport], Callable[..., WorkflowsTransport] ] = ( - type(self).get_transport_class(transport) + WorkflowsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WorkflowsTransport], transport) ) diff --git a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1.json b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1.json index 5cf462ad7929..56d60ff5a803 100644 --- a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1.json +++ b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-workflows-executions", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1beta.json b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1beta.json index b6389aace919..c486937ea17a 100644 --- a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1beta.json +++ b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.executions.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-workflows-executions", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1.json b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1.json index c42b11c8001e..5eeb3c21a076 100644 --- a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1.json +++ b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-workflows", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1beta.json b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1beta.json index 7311d137f95e..e6e1022f77a9 100644 --- a/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1beta.json +++ 
b/packages/google-cloud-workflows/samples/generated_samples/snippet_metadata_google.cloud.workflows.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-workflows", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py index 087a8f8d4029..f8246c916ce1 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1/test_executions.py @@ -1225,22 +1225,23 @@ async def test_list_executions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_executions - ] = mock_object + ] = mock_rpc request = {} await client.list_executions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_executions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1800,22 +1801,23 @@ async def test_create_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_execution - ] = mock_object + ] = mock_rpc request = {} await client.create_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2199,22 +2201,23 @@ async def test_get_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_execution - ] = mock_object + ] = mock_rpc request = {} await client.get_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2590,22 +2593,23 @@ async def test_cancel_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_execution - ] = mock_object + ] = mock_rpc request = {} await client.cancel_execution(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py index 2a8fe497d2f1..26ddba668c97 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/executions_v1beta/test_executions.py @@ -1220,22 +1220,23 @@ async def test_list_executions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_executions - ] = mock_object + ] = mock_rpc request = {} await client.list_executions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_executions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1792,22 +1793,23 @@ async def test_create_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_execution - ] = mock_object + ] = mock_rpc request = {} await client.create_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2186,22 +2188,23 @@ async def test_get_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_execution - ] = mock_object + ] = mock_rpc request = {} await client.get_execution(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2572,22 +2575,23 @@ async def test_cancel_execution_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_execution - ] = mock_object + ] = mock_rpc request = {} await client.cancel_execution(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_execution(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py index 7d00aafeb940..20c2e191d44e 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1/test_workflows.py @@ -1259,22 +1259,23 @@ async def test_list_workflows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflows - ] = mock_object + ] = mock_rpc request = {} await client.list_workflows(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1840,22 +1841,23 @@ async def test_get_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2158,8 +2160,9 @@ def test_create_workflow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workflow(request) @@ -2213,26 +2216,28 @@ async def test_create_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2542,8 +2547,9 @@ def test_delete_workflow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workflow(request) @@ -2597,26 +2603,28 @@ async def test_delete_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2902,8 +2910,9 @@ def test_update_workflow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workflow(request) @@ -2957,26 +2966,28 @@ async def test_update_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workflow - ] = mock_object + ] = mock_rpc request = {} await client.update_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py index 06ff6abc3753..d33cfdf0a8a5 100644 --- a/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py +++ b/packages/google-cloud-workflows/tests/unit/gapic/workflows_v1beta/test_workflows.py @@ -1258,22 +1258,23 @@ async def test_list_workflows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflows - ] = mock_object + ] = mock_rpc request = {} await client.list_workflows(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1831,22 +1832,23 @@ async def test_get_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2145,8 +2147,9 @@ def test_create_workflow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workflow(request) @@ -2200,26 +2203,28 @@ async def test_create_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2529,8 +2534,9 @@ def test_delete_workflow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workflow(request) @@ -2584,26 +2590,28 @@ async def test_delete_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2889,8 +2897,9 @@ def test_update_workflow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workflow(request) @@ -2944,26 +2953,28 @@ async def test_update_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workflow - ] = mock_object + ] = mock_rpc request = {} await client.update_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-workstations/google/cloud/workstations/gapic_version.py b/packages/google-cloud-workstations/google/cloud/workstations/gapic_version.py index e811309b2349..558c8aab67c5 100644 --- a/packages/google-cloud-workstations/google/cloud/workstations/gapic_version.py +++ b/packages/google-cloud-workstations/google/cloud/workstations/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_version.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_version.py index e811309b2349..558c8aab67c5 100644 --- a/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_version.py +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/async_client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/async_client.py index 8827f0eae15f..414031e01f6a 100644 --- a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/async_client.py +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WorkstationsClient).get_transport_class, type(WorkstationsClient) - ) + get_transport_class = WorkstationsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/client.py index 93a7e1e39aa4..3f49e51395dd 100644 --- a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/client.py +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/client.py @@ -716,7 +716,7 @@ def __init__( transport_init: Union[ Type[WorkstationsTransport], Callable[..., WorkstationsTransport] ] = ( - type(self).get_transport_class(transport) + WorkstationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WorkstationsTransport], transport) ) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_version.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_version.py index e811309b2349..558c8aab67c5 100644 --- a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_version.py +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/async_client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/async_client.py index c0e7c16051ad..e9368e7040ac 100644 --- a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/async_client.py +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WorkstationsClient).get_transport_class, type(WorkstationsClient) - ) + get_transport_class = WorkstationsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/client.py index a02acb7766ff..97fefae14e60 100644 --- a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/client.py +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/client.py @@ -716,7 +716,7 @@ def __init__( transport_init: Union[ Type[WorkstationsTransport], Callable[..., WorkstationsTransport] ] = ( - type(self).get_transport_class(transport) + WorkstationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WorkstationsTransport], transport) ) diff --git a/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json index b87eb164d4c0..8dabda2a933a 100644 --- a/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json +++ b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-workstations", - "version": "0.5.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json index dfed42a7fa94..8afcd1793b5c 100644 --- a/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json +++ b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-workstations", - "version": "0.5.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py index 085ba46e450f..85babd4da988 100644 --- a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py @@ -1314,22 +1314,23 @@ async def test_get_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1723,22 +1724,23 @@ async def test_list_workstation_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workstation_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_workstation_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workstation_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2261,8 +2263,9 @@ def test_create_workstation_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workstation_cluster(request) @@ -2318,26 +2321,28 @@ async def test_create_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2665,8 +2670,9 @@ def test_update_workstation_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workstation_cluster(request) @@ -2722,26 +2728,28 @@ async def test_update_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_workstation_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workstation_cluster(request) @@ -3122,26 +3131,28 @@ async def test_delete_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3529,22 +3540,23 @@ async def test_get_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.get_workstation_config(request) # Establish that the underlying gRPC stub method was called. 
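# Illustrative sketch of the reworded operation-method comments above (assumes
# a hypothetical FakeOperationsTransport, not the real transport class):
# long-running-operation methods build an operations client lazily on the
# first RPC and reuse the cached instance afterwards, which is why wrapper_fn
# is expected to run on the first call and stay at call_count == 0 after
# reset_mock() on later calls.
from unittest import mock

class FakeOperationsTransport:
    def __init__(self, make_operations_client):
        self._make_operations_client = make_operations_client
        self._cached_operations_client = None

    @property
    def operations_client(self):
        # Built on first access only; later LRO calls reuse the cached client.
        if self._cached_operations_client is None:
            self._cached_operations_client = self._make_operations_client()
        return self._cached_operations_client

factory = mock.Mock(return_value=mock.Mock(name="operations_client"))
transport = FakeOperationsTransport(factory)
transport.operations_client  # first LRO call: client is created
transport.operations_client  # second call: cached instance reused
assert factory.call_count == 1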
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3934,22 +3946,23 @@ async def test_list_workstation_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workstation_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_workstation_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workstation_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4533,22 +4546,23 @@ async def test_list_usable_workstation_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_workstation_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_workstation_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_workstation_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5071,8 +5085,9 @@ def test_create_workstation_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workstation_config(request) @@ -5128,26 +5143,28 @@ async def test_create_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.create_workstation_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5475,8 +5492,9 @@ def test_update_workstation_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workstation_config(request) @@ -5532,26 +5550,28 @@ async def test_update_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.update_workstation_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5875,8 +5895,9 @@ def test_delete_workstation_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workstation_config(request) @@ -5932,26 +5953,28 @@ async def test_delete_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_workstation_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6326,22 +6349,23 @@ async def test_get_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workstation - ] = mock_object + ] = mock_rpc request = {} await client.get_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6717,22 +6741,23 @@ async def test_list_workstations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workstations - ] = mock_object + ] = mock_rpc request = {} await client.list_workstations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workstations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7313,22 +7338,23 @@ async def test_list_usable_workstations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_workstations - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_workstations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_workstations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7850,8 +7876,9 @@ def test_create_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workstation(request) @@ -7907,26 +7934,28 @@ async def test_create_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workstation - ] = mock_object + ] = mock_rpc request = {} await client.create_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8252,8 +8281,9 @@ def test_update_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workstation(request) @@ -8309,26 +8339,28 @@ async def test_update_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workstation - ] = mock_object + ] = mock_rpc request = {} await client.update_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8650,8 +8682,9 @@ def test_delete_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workstation(request) @@ -8707,26 +8740,28 @@ async def test_delete_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workstation - ] = mock_object + ] = mock_rpc request = {} await client.delete_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9036,8 +9071,9 @@ def test_start_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_workstation(request) @@ -9093,26 +9129,28 @@ async def test_start_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_workstation - ] = mock_object + ] = mock_rpc request = {} await client.start_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9416,8 +9454,9 @@ def test_stop_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_workstation(request) @@ -9471,26 +9510,28 @@ async def test_stop_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_workstation - ] = mock_object + ] = mock_rpc request = {} await client.stop_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9849,22 +9890,23 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_access_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_access_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_access_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py index 2bc082c743c6..7c9037bf70dd 100644 --- a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py @@ -1314,22 +1314,23 @@ async def test_get_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1723,22 +1724,23 @@ async def test_list_workstation_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workstation_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_workstation_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workstation_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2261,8 +2263,9 @@ def test_create_workstation_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workstation_cluster(request) @@ -2318,26 +2321,28 @@ async def test_create_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2665,8 +2670,9 @@ def test_update_workstation_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workstation_cluster(request) @@ -2722,26 +2728,28 @@ async def test_update_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_workstation_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workstation_cluster(request) @@ -3122,26 +3131,28 @@ async def test_delete_workstation_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workstation_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_workstation_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workstation_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3532,22 +3543,23 @@ async def test_get_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.get_workstation_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3939,22 +3951,23 @@ async def test_list_workstation_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workstation_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_workstation_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workstation_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4538,22 +4551,23 @@ async def test_list_usable_workstation_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_workstation_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_workstation_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_workstation_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5076,8 +5090,9 @@ def test_create_workstation_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workstation_config(request) @@ -5133,26 +5148,28 @@ async def test_create_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.create_workstation_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5480,8 +5497,9 @@ def test_update_workstation_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workstation_config(request) @@ -5537,26 +5555,28 @@ async def test_update_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.update_workstation_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5880,8 +5900,9 @@ def test_delete_workstation_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workstation_config(request) @@ -5937,26 +5958,28 @@ async def test_delete_workstation_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workstation_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_workstation_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workstation_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6331,22 +6354,23 @@ async def test_get_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workstation - ] = mock_object + ] = mock_rpc request = {} await client.get_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6722,22 +6746,23 @@ async def test_list_workstations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workstations - ] = mock_object + ] = mock_rpc request = {} await client.list_workstations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workstations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7318,22 +7343,23 @@ async def test_list_usable_workstations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_workstations - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_workstations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_workstations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7855,8 +7881,9 @@ def test_create_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workstation(request) @@ -7912,26 +7939,28 @@ async def test_create_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workstation - ] = mock_object + ] = mock_rpc request = {} await client.create_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8257,8 +8286,9 @@ def test_update_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workstation(request) @@ -8314,26 +8344,28 @@ async def test_update_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workstation - ] = mock_object + ] = mock_rpc request = {} await client.update_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8655,8 +8687,9 @@ def test_delete_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workstation(request) @@ -8712,26 +8745,28 @@ async def test_delete_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workstation - ] = mock_object + ] = mock_rpc request = {} await client.delete_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9041,8 +9076,9 @@ def test_start_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_workstation(request) @@ -9098,26 +9134,28 @@ async def test_start_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_workstation - ] = mock_object + ] = mock_rpc request = {} await client.start_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9421,8 +9459,9 @@ def test_stop_workstation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_workstation(request) @@ -9476,26 +9515,28 @@ async def test_stop_workstation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_workstation - ] = mock_object + ] = mock_rpc request = {} await client.stop_workstation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_workstation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9854,22 +9895,23 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_access_token - ] = mock_object + ] = mock_rpc request = {} await client.generate_access_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_access_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-geo-type/google/geo/type/gapic_version.py b/packages/google-geo-type/google/geo/type/gapic_version.py index d24d3fba9057..558c8aab67c5 100644 --- a/packages/google-geo-type/google/geo/type/gapic_version.py +++ b/packages/google-geo-type/google/geo/type/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py index fb3463bbb3c2..558c8aab67c5 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py index fb3463bbb3c2..558c8aab67c5 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/async_client.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/async_client.py index d8ad16ff1efe..48a080b30e35 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/async_client.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -184,9 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AddressValidationClient).get_transport_class, type(AddressValidationClient) - ) + get_transport_class = AddressValidationClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py index 3337e3b8b8a8..a14fdfff54ce 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py @@ -642,7 +642,7 @@ def __init__( Type[AddressValidationTransport], Callable[..., AddressValidationTransport], ] = ( - type(self).get_transport_class(transport) + AddressValidationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AddressValidationTransport], transport) ) diff --git a/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json b/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json index 55928c104e22..b8aeda3afd23 100644 --- a/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json +++ b/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-addressvalidation", - "version": "0.3.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py b/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py index 3935f17726d1..76de892581e3 100644 --- 
a/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py +++ b/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py @@ -1323,22 +1323,23 @@ async def test_validate_address_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_address - ] = mock_object + ] = mock_rpc request = {} await client.validate_address(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_address(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1557,22 +1558,23 @@ async def test_provide_validation_feedback_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provide_validation_feedback - ] = mock_object + ] = mock_rpc request = {} await client.provide_validation_feedback(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.provide_validation_feedback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py index 355df6b536f8..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py index 355df6b536f8..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/async_client.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/async_client.py index dea953e2e407..57872ce521af 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/async_client.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DeliveryServiceClient).get_transport_class, type(DeliveryServiceClient) - ) + get_transport_class = DeliveryServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/client.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/client.py index d58160d8d51c..d27ed0c69a5c 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/client.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/services/delivery_service/client.py @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[DeliveryServiceTransport], Callable[..., DeliveryServiceTransport] ] = ( - type(self).get_transport_class(transport) + DeliveryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DeliveryServiceTransport], transport) ) diff --git a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json index 31dbe930e61e..884ac226d0e4 100644 --- a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json +++ b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine-delivery", - "version": "0.2.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py b/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py index 741041daa2d9..6788563f65e5 100644 --- a/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py +++ b/packages/google-maps-fleetengine-delivery/tests/unit/gapic/fleetengine_delivery_v1/test_delivery_service.py @@ -1344,22 +1344,23 @@ async def test_create_delivery_vehicle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_delivery_vehicle - ] = mock_object + ] = mock_rpc request = {} await 
client.create_delivery_vehicle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_delivery_vehicle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1733,22 +1734,23 @@ async def test_get_delivery_vehicle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_delivery_vehicle - ] = mock_object + ] = mock_rpc request = {} await client.get_delivery_vehicle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_delivery_vehicle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2096,22 +2098,23 @@ async def test_update_delivery_vehicle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_delivery_vehicle - ] = mock_object + ] = mock_rpc request = {} await client.update_delivery_vehicle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_delivery_vehicle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2459,22 +2462,23 @@ async def test_batch_create_tasks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_tasks - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_tasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2728,22 +2732,23 @@ async def test_create_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_task - ] = mock_object + ] = mock_rpc request = {} await client.create_task(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3107,22 +3112,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3464,22 +3470,23 @@ async def test_update_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_task - ] = mock_object + ] = mock_rpc request = {} await client.update_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3819,22 +3826,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4361,22 +4369,23 @@ async def test_get_task_tracking_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task_tracking_info - ] = mock_object + ] = mock_rpc request = {} await client.get_task_tracking_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task_tracking_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4723,22 +4732,23 @@ async def test_list_delivery_vehicles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_delivery_vehicles - ] = mock_object + ] = mock_rpc request = {} await client.list_delivery_vehicles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_delivery_vehicles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py b/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py index d1a1a883babd..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py index d1a1a883babd..558c8aab67c5 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/async_client.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/async_client.py index 7d7d94cf1c64..2f9341d45df7 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/async_client.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TripServiceClient).get_transport_class, type(TripServiceClient) - ) + get_transport_class = TripServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/client.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/client.py index 4fc9b7945644..e26f48c95cb9 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/client.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/trip_service/client.py @@ -653,7 +653,7 @@ def __init__( transport_init: Union[ Type[TripServiceTransport], Callable[..., TripServiceTransport] ] = ( - type(self).get_transport_class(transport) + TripServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TripServiceTransport], transport) ) diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/async_client.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/async_client.py index e5ba59820e0a..19b447b14988 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/async_client.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VehicleServiceClient).get_transport_class, type(VehicleServiceClient) - ) + get_transport_class = VehicleServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/client.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/client.py index 829a89b1228e..63b58a5d7c77 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/client.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/services/vehicle_service/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[VehicleServiceTransport], Callable[..., VehicleServiceTransport] ] = ( - type(self).get_transport_class(transport) + VehicleServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VehicleServiceTransport], transport) ) diff --git a/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json b/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json index b5a74b72dd59..b0139d358773 100644 --- a/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json +++ b/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine", - "version": "0.2.2" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py index 835525c44ae3..cfed72f2af3b 100644 --- a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py +++ b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_trip_service.py @@ -1258,22 +1258,23 @@ async def test_create_trip_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_trip - ] = mock_object + ] = mock_rpc request = {} await client.create_trip(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_trip(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1541,22 +1542,23 @@ async def test_get_trip_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_trip - ] = mock_object + ] = mock_rpc request = {} await client.get_trip(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_trip(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1809,22 +1811,23 @@ async def test_report_billable_trip_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_billable_trip - ] = mock_object + ] = mock_rpc request = {} await client.report_billable_trip(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.report_billable_trip(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2057,22 +2060,23 @@ async def test_search_trips_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_trips - ] = mock_object + ] = mock_rpc request = {} await client.search_trips(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_trips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2517,22 +2521,23 @@ async def test_update_trip_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_trip - ] = mock_object + ] = mock_rpc request = {} await client.update_trip(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_trip(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py index dd76ce7ab760..8c3e7b0811ae 100644 --- a/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py +++ b/packages/google-maps-fleetengine/tests/unit/gapic/fleetengine_v1/test_vehicle_service.py @@ -1293,22 +1293,23 @@ async def test_create_vehicle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_vehicle - ] = mock_object + ] = mock_rpc request = {} await client.create_vehicle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_vehicle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1573,22 +1574,23 @@ async def test_get_vehicle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vehicle - ] = mock_object + ] = mock_rpc request = {} await client.get_vehicle(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vehicle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1853,22 +1855,23 @@ async def test_update_vehicle_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_vehicle - ] = mock_object + ] = mock_rpc request = {} await client.update_vehicle(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_vehicle(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2120,22 +2123,23 @@ async def test_update_vehicle_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_vehicle_attributes - ] = mock_object + ] = mock_rpc request = {} await client.update_vehicle_attributes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_vehicle_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2376,22 +2380,23 @@ async def test_list_vehicles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_vehicles - ] = mock_object + ] = mock_rpc request = {} await client.list_vehicles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_vehicles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2813,22 +2818,23 @@ async def test_search_vehicles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_vehicles - ] = mock_object + ] = mock_rpc request = {} await client.search_vehicles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_vehicles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/gapic_version.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/gapic_version.py index ee41ffcc0a1d..558c8aab67c5 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/gapic_version.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_version.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_version.py index ee41ffcc0a1d..558c8aab67c5 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_version.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py index 8bbdf6023eb4..515b3f3a7a64 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,10 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MapsPlatformDatasetsClient).get_transport_class, - type(MapsPlatformDatasetsClient), - ) + get_transport_class = MapsPlatformDatasetsClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py index 5893deda60f0..cfff5003241f 100644 --- a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py @@ -667,7 +667,7 @@ def __init__( Type[MapsPlatformDatasetsTransport], Callable[..., MapsPlatformDatasetsTransport], ] = ( - type(self).get_transport_class(transport) + MapsPlatformDatasetsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MapsPlatformDatasetsTransport], transport) ) diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json index 415d2d8ad398..ef06d5878d41 100644 --- a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-mapsplatformdatasets", - "version": "0.4.2" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py index e2ebce74789a..c731c88871b4 100644 --- a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py @@ -1370,22 +1370,23 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1775,22 +1776,23 @@ async def test_update_dataset_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dataset_metadata - ] = mock_object + ] = mock_rpc request = {} await client.update_dataset_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dataset_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2181,22 +2183,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2566,22 +2569,23 @@ async def test_fetch_dataset_errors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_dataset_errors - ] = mock_object + ] = mock_rpc request = {} await client.fetch_dataset_errors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_dataset_errors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3147,22 +3151,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3699,22 +3704,23 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index 01c368217277..d95e1c4fc542 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ b/scripts/client-post-processing/doc-formatting.yaml @@ -159,7 +159,7 @@ replacements: \ 'ingestionTime': DOUBLE; \(UNIX timestamp\) \ 'application': STRING; after: " 'ingestionTime': DOUBLE; (UNIX timestamp)\n 'application': STRING;\n" - count: 1 + count: 2 - paths: [ packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py, ] From 9ac95e45ae509c9137b90e6e67130122a415b782 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 11:52:26 -0400 Subject: [PATCH 021/108] chore: release main (#13019) :robot: I have created a release *beep* *boop* ---
google-ai-generativelanguage: 0.6.9 ## [0.6.9](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.8...google-ai-generativelanguage-v0.6.9) (2024-08-19) ### Features * Add model max_temperature ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) * Add new PromptFeedback and FinishReason entries ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) * Add new PromptFeedback and FinishReason entries for https://github.com/google-gemini/generative-ai-python/issues/476 ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) ### Documentation * Many small fixes ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72))
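For reference, the new `max_temperature` field surfaces on the `Model` message returned by the model service. A minimal sketch of reading it is below; it assumes API-key auth is configured and the model name shown is purely illustrative, not taken from this release.

```python
# Sketch only: reads the max_temperature field added in 0.6.9.
# Assumes API-key auth; the model name below is illustrative.
from google.ai import generativelanguage_v1beta as glm

client = glm.ModelServiceClient(client_options={"api_key": "YOUR_API_KEY"})
model = client.get_model(name="models/gemini-1.5-flash")
print(model.temperature, model.max_temperature)
```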
google-cloud-batch: 0.17.25 ## [0.17.25](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.24...google-cloud-batch-v0.17.25) (2024-08-19) ### Documentation * Batch CentOS images and HPC CentOS images are EOS ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) * Clarify required fields for Runnable.Container ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) * Clarify required oneof fields for Runnable.Script ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) * Clarify TaskSpec requires one or more runnables ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03))
google-cloud-dlp: 3.22.0 ## [3.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.21.0...google-cloud-dlp-v3.22.0) (2024-08-19) ### Features * file store data profiles can now be filtered by type and storage location ([5dc35c8](https://github.com/googleapis/google-cloud-python/commit/5dc35c8b35091a0ed7f69a0f4f4652a48523efaa)) * inspect template modified cadence discovery config for Cloud SQL ([5dc35c8](https://github.com/googleapis/google-cloud-python/commit/5dc35c8b35091a0ed7f69a0f4f4652a48523efaa)) ### Documentation * small improvements ([5dc35c8](https://github.com/googleapis/google-cloud-python/commit/5dc35c8b35091a0ed7f69a0f4f4652a48523efaa))
google-cloud-managedkafka: 0.1.3 ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-managedkafka-v0.1.2...google-cloud-managedkafka-v0.1.3) (2024-08-19) ### Documentation * [google-cloud-managedkafka] changed API title to official name ([#13010](https://github.com/googleapis/google-cloud-python/issues/13010)) ([5e6b4ce](https://github.com/googleapis/google-cloud-python/commit/5e6b4ce92614cc9a169c530f9a23d3934f4868cc))
google-cloud-storage-transfer: 1.12.0 ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-storage-transfer-v1.11.5...google-cloud-storage-transfer-v1.12.0) (2024-08-19) ### Features * add GCS Managed Folders ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0)) * add HDFS configuration ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0)) * add S3 Cloudfront Domain ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0)) * add S3 Managed Private Network ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0))
google-cloud-texttospeech: 2.17.0 ## [2.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.16.5...google-cloud-texttospeech-v2.17.0) (2024-08-19) ### Features * A new method `StreamingSynthesize` is added to service `TextToSpeech` ([973e48a](https://github.com/googleapis/google-cloud-python/commit/973e48afb87cef6565535a7262e38195245018ef)) ### Documentation * A comment for field `name` in message `.google.cloud.texttospeech.v1.VoiceSelectionParams` is changed ([973e48a](https://github.com/googleapis/google-cloud-python/commit/973e48afb87cef6565535a7262e38195245018ef)) * A comment for field `name` in message `.google.cloud.texttospeech.v1beta1.VoiceSelectionParams` is changed ([973e48a](https://github.com/googleapis/google-cloud-python/commit/973e48afb87cef6565535a7262e38195245018ef))
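For context, `StreamingSynthesize` is a bidirectional streaming call: the first request carries only the streaming configuration, and later requests carry the text to synthesize. A minimal sketch under those assumptions follows; it uses the `v1beta1` client, application-default credentials, and an illustrative voice name, so treat the specific names as placeholders rather than part of this release.

```python
# Sketch only: bidirectional streaming synthesis (StreamingSynthesize).
# Assumes application-default credentials; the voice name is illustrative.
from google.cloud import texttospeech_v1beta1 as tts

client = tts.TextToSpeechClient()

def requests():
    # First request: streaming configuration only.
    yield tts.StreamingSynthesizeRequest(
        streaming_config=tts.StreamingSynthesizeConfig(
            voice=tts.VoiceSelectionParams(
                language_code="en-US", name="en-US-Journey-D"
            )
        )
    )
    # Subsequent requests: the text input to synthesize.
    yield tts.StreamingSynthesizeRequest(
        input=tts.StreamingSynthesisInput(text="Hello from streaming synthesis.")
    )

for response in client.streaming_synthesize(requests=requests()):
    audio_chunk = response.audio_content  # raw audio bytes per response
```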
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .release-please-manifest.json | 12 ++++++------ packages/google-ai-generativelanguage/CHANGELOG.md | 14 ++++++++++++++ .../google/ai/generativelanguage/gapic_version.py | 2 +- .../ai/generativelanguage_v1/gapic_version.py | 2 +- .../ai/generativelanguage_v1beta/gapic_version.py | 2 +- .../ai/generativelanguage_v1beta2/gapic_version.py | 2 +- .../ai/generativelanguage_v1beta3/gapic_version.py | 2 +- ...t_metadata_google.ai.generativelanguage.v1.json | 2 +- ...tadata_google.ai.generativelanguage.v1beta.json | 2 +- ...adata_google.ai.generativelanguage.v1beta2.json | 2 +- ...adata_google.ai.generativelanguage.v1beta3.json | 2 +- packages/google-cloud-batch/CHANGELOG.md | 10 ++++++++++ .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.batch.v1.json | 2 +- ...nippet_metadata_google.cloud.batch.v1alpha.json | 2 +- packages/google-cloud-dlp/CHANGELOG.md | 13 +++++++++++++ .../google/cloud/dlp/gapic_version.py | 2 +- .../google/cloud/dlp_v2/gapic_version.py | 2 +- .../snippet_metadata_google.privacy.dlp.v2.json | 2 +- packages/google-cloud-managedkafka/CHANGELOG.md | 7 +++++++ .../google/cloud/managedkafka/gapic_version.py | 2 +- .../google/cloud/managedkafka_v1/gapic_version.py | 2 +- ...ppet_metadata_google.cloud.managedkafka.v1.json | 2 +- .../google-cloud-storage-transfer/CHANGELOG.md | 10 ++++++++++ .../google/cloud/storage_transfer/gapic_version.py | 2 +- .../cloud/storage_transfer_v1/gapic_version.py | 2 +- ...snippet_metadata_google.storagetransfer.v1.json | 2 +- packages/google-cloud-texttospeech/CHANGELOG.md | 13 +++++++++++++ .../google/cloud/texttospeech/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1/gapic_version.py | 2 +- .../cloud/texttospeech_v1beta1/gapic_version.py | 2 +- ...ppet_metadata_google.cloud.texttospeech.v1.json | 2 +- ...metadata_google.cloud.texttospeech.v1beta1.json | 2 +- 35 files changed, 101 insertions(+), 34 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a5489f077d52..c0f47cf58de9 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,6 +1,6 @@ { "packages/google-ads-admanager": "0.1.2", - "packages/google-ai-generativelanguage": "0.6.8", + "packages/google-ai-generativelanguage": "0.6.9", "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", "packages/google-apps-card": "0.1.4", @@ -27,7 +27,7 @@ "packages/google-cloud-automl": "2.13.5", "packages/google-cloud-backupdr": "0.1.3", "packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.24", + "packages/google-cloud-batch": "0.17.25", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", "packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -75,7 +75,7 @@ "packages/google-cloud-dialogflow": "2.31.0", "packages/google-cloud-dialogflow-cx": "1.35.0", "packages/google-cloud-discoveryengine": "0.12.1", - "packages/google-cloud-dlp": "3.21.0", + "packages/google-cloud-dlp": "3.22.0", "packages/google-cloud-dms": "1.9.5", 
"packages/google-cloud-documentai": "2.31.0", "packages/google-cloud-domains": "1.7.5", @@ -102,7 +102,7 @@ "packages/google-cloud-language": "2.14.0", "packages/google-cloud-life-sciences": "0.9.12", "packages/google-cloud-managed-identities": "1.9.5", - "packages/google-cloud-managedkafka": "0.1.2", + "packages/google-cloud-managedkafka": "0.1.3", "packages/google-cloud-media-translation": "0.11.11", "packages/google-cloud-memcache": "1.9.5", "packages/google-cloud-migrationcenter": "0.1.9", @@ -152,13 +152,13 @@ "packages/google-cloud-source-context": "1.5.5", "packages/google-cloud-speech": "2.27.0", "packages/google-cloud-storage-control": "1.0.3", - "packages/google-cloud-storage-transfer": "1.11.5", + "packages/google-cloud-storage-transfer": "1.12.0", "packages/google-cloud-storageinsights": "0.1.10", "packages/google-cloud-support": "0.1.9", "packages/google-cloud-talent": "2.13.5", "packages/google-cloud-tasks": "2.16.5", "packages/google-cloud-telcoautomation": "0.2.5", - "packages/google-cloud-texttospeech": "2.16.5", + "packages/google-cloud-texttospeech": "2.17.0", "packages/google-cloud-tpu": "1.18.5", "packages/google-cloud-trace": "1.13.5", "packages/google-cloud-translate": "3.16.0", diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index 656da96f6be1..d3b8538f00c3 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.6.9](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.8...google-ai-generativelanguage-v0.6.9) (2024-08-19) + + +### Features + +* Add model max_temperature ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) +* Add new PromptFeedback and FinishReason entries ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) +* Add new PromptFeedback and FinishReason entries for https://github.com/google-gemini/generative-ai-python/issues/476 ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) + + +### Documentation + +* Many small fixes ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) + ## [0.6.8](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.7...google-ai-generativelanguage-v0.6.8) (2024-07-30) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 558c8aab67c5..1699c98da708 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.9" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 558c8aab67c5..1699c98da708 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.9" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 558c8aab67c5..1699c98da708 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.9" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 558c8aab67c5..1699c98da708 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.9" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 558c8aab67c5..1699c98da708 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.9" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index d6c3fe4c5051..dcb6ad5e6a9e 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.9" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index 3a0ef31881fb..a6e1502bce78 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.9" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index 5b7d0a0509b4..d3fc92d09eaa 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.9" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 91de9e353f90..d9c470b9e07c 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.9" }, "snippets": [ { diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index 20c41c42be0e..fe8a3a929a87 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.17.25](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.24...google-cloud-batch-v0.17.25) (2024-08-19) + + +### Documentation + +* Batch CentOS images and HPC CentOS images are EOS ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) +* Clarify required fields for Runnable.Container 
([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) +* Clarify required oneof fields for Runnable.Script ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) +* Clarify TaskSpec requires one or more runnables ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) + ## [0.17.24](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.23...google-cloud-batch-v0.17.24) (2024-08-08) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 558c8aab67c5..43863a7a8ca3 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.25" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 558c8aab67c5..43863a7a8ca3 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.25" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 558c8aab67c5..43863a7a8ca3 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.25" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index e2df1067e4dd..772c4bb246f5 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.25" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 7f67670b100c..56a4ce0bde8d 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.25" }, "snippets": [ { diff --git a/packages/google-cloud-dlp/CHANGELOG.md b/packages/google-cloud-dlp/CHANGELOG.md index a90d054b8e83..589d11fd9bed 100644 --- a/packages/google-cloud-dlp/CHANGELOG.md +++ b/packages/google-cloud-dlp/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-dlp/#history +## [3.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.21.0...google-cloud-dlp-v3.22.0) (2024-08-19) + + +### Features + +* file store data profiles can now be filtered by type and storage location ([5dc35c8](https://github.com/googleapis/google-cloud-python/commit/5dc35c8b35091a0ed7f69a0f4f4652a48523efaa)) +* inspect template modified cadence discovery config for Cloud SQL ([5dc35c8](https://github.com/googleapis/google-cloud-python/commit/5dc35c8b35091a0ed7f69a0f4f4652a48523efaa)) + + +### Documentation + +* small improvements ([5dc35c8](https://github.com/googleapis/google-cloud-python/commit/5dc35c8b35091a0ed7f69a0f4f4652a48523efaa)) + ## [3.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.20.0...google-cloud-dlp-v3.21.0) (2024-08-08) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py index 558c8aab67c5..3ed830e26f63 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py index 558c8aab67c5..3ed830e26f63 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json index 4da85d5c6cd9..cb1c758e62fc 100644 --- a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dlp", - "version": "0.1.0" + "version": "3.22.0" }, "snippets": [ { diff --git a/packages/google-cloud-managedkafka/CHANGELOG.md b/packages/google-cloud-managedkafka/CHANGELOG.md index 1307064ce2bb..79c377ce8300 100644 --- a/packages/google-cloud-managedkafka/CHANGELOG.md +++ b/packages/google-cloud-managedkafka/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-managedkafka-v0.1.2...google-cloud-managedkafka-v0.1.3) (2024-08-19) + + +### Documentation + +* [google-cloud-managedkafka] changed API title to official name ([#13010](https://github.com/googleapis/google-cloud-python/issues/13010)) ([5e6b4ce](https://github.com/googleapis/google-cloud-python/commit/5e6b4ce92614cc9a169c530f9a23d3934f4868cc)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-managedkafka-v0.1.1...google-cloud-managedkafka-v0.1.2) (2024-07-30) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json index 37b09a104f58..3e586401a59f 100644 --- a/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json +++ b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-managedkafka", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { diff --git a/packages/google-cloud-storage-transfer/CHANGELOG.md b/packages/google-cloud-storage-transfer/CHANGELOG.md index f3582371c72f..13c759fb6f24 100644 --- a/packages/google-cloud-storage-transfer/CHANGELOG.md +++ b/packages/google-cloud-storage-transfer/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-storage-transfer-v1.11.5...google-cloud-storage-transfer-v1.12.0) (2024-08-19) + + +### Features + +* add GCS Managed Folders ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0)) +* add HDFS configuration ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0)) +* add S3 Cloudfront Domain ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0)) +* add S3 Managed Private Network ([9c54c1d](https://github.com/googleapis/google-cloud-python/commit/9c54c1d92e54f71f35d8e7a65bb16f730ec841b0)) + ## [1.11.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-storage-transfer-v1.11.4...google-cloud-storage-transfer-v1.11.5) (2024-07-30) diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py index 558c8aab67c5..739fdfae141c 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py index 558c8aab67c5..739fdfae141c 100644 --- a/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py +++ b/packages/google-cloud-storage-transfer/google/cloud/storage_transfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json b/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json index f3c5ac4b04c5..1db03f4d6f4e 100644 --- a/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json +++ b/packages/google-cloud-storage-transfer/samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-storage-transfer", - "version": "0.1.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/CHANGELOG.md b/packages/google-cloud-texttospeech/CHANGELOG.md index 47064d75fe88..c522f2fe2cec 100644 --- a/packages/google-cloud-texttospeech/CHANGELOG.md +++ b/packages/google-cloud-texttospeech/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-texttospeech/#history +## [2.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.16.5...google-cloud-texttospeech-v2.17.0) (2024-08-19) + + +### Features + +* A new method `StreamingSynthesize` is added to service `TextToSpeech` ([973e48a](https://github.com/googleapis/google-cloud-python/commit/973e48afb87cef6565535a7262e38195245018ef)) + + +### Documentation + +* A comment for field `name` in message `.google.cloud.texttospeech.v1.VoiceSelectionParams` is changed ([973e48a](https://github.com/googleapis/google-cloud-python/commit/973e48afb87cef6565535a7262e38195245018ef)) +* A comment for field `name` in message `.google.cloud.texttospeech.v1beta1.VoiceSelectionParams` is changed ([973e48a](https://github.com/googleapis/google-cloud-python/commit/973e48afb87cef6565535a7262e38195245018ef)) + ## [2.16.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.16.4...google-cloud-texttospeech-v2.16.5) (2024-07-30) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 558c8aab67c5..6053ad2404bf 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 558c8aab67c5..6053ad2404bf 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 558c8aab67c5..6053ad2404bf 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index f87785fcdd45..3dced488d328 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "0.1.0" + "version": "2.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index 2877853b66c5..42e6c3c22568 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "0.1.0" + "version": "2.17.0" }, "snippets": [ { From 875f390264cb8d91ceb6d6fd4cb5aca27cd9262f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 16:12:30 +0000 Subject: [PATCH 022/108] chore: [Many APIs] Update gapic-generator-python to v1.18.5 (#13004) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcGxhY2VzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcm91dGVvcHRpbWl6YXRpb24vLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcm91dGluZy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtc29sYXIvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLWNzcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWFjY291bnRzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWNvbnZlcnNpb25zLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWRhdGFzb3VyY2VzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWludmVudG9yaWVzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWxmcC8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LW5vdGlmaWNhdGlvbnMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LXByb2R1Y3RzLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LXByb21vdGlvbnMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LXF1b3RhLy5Pd2xCb3QueWFtbCIsImgiOiI3OWE4NDExYmJkYjI1YTk4M2ZhM2FhZThjMGUxNDMyN2RmMTI5Zjk0In0= Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LXJlcG9ydHMvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLXR5cGUvLk93bEJvdC55YW1sIiwiaCI6Ijc5YTg0MTFiYmRiMjVhOTgzZmEzYWFlOGMwZTE0MzI3ZGYxMjlmOTQifQ== Copy-Tag: eyJwIjoicGFja2FnZXMvZ3JhZmVhcy8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/maps/places/gapic_version.py | 2 +- .../google/maps/places_v1/gapic_version.py | 2 +- .../places_v1/services/places/async_client.py | 5 +- .../maps/places_v1/services/places/client.py | 2 +- ...nippet_metadata_google.maps.places.v1.json | 2 +- .../tests/unit/gapic/places_v1/test_places.py | 45 ++++--- .../maps/routeoptimization/gapic_version.py | 2 +- .../routeoptimization_v1/gapic_version.py | 2 +- .../route_optimization/async_client.py | 5 +- .../services/route_optimization/client.py | 2 +- ...data_google.maps.routeoptimization.v1.json | 2 +- .../test_route_optimization.py | 28 ++-- .../google/maps/routing/gapic_version.py | 2 +- 
.../google/maps/routing_v2/gapic_version.py | 2 +- .../services/routes/async_client.py | 5 +- .../maps/routing_v2/services/routes/client.py | 2 +- .../maps/routing_v2/types/toll_passes.py | 1 - ...ippet_metadata_google.maps.routing.v2.json | 2 +- .../unit/gapic/routing_v2/test_routes.py | 18 +-- .../google/maps/solar/gapic_version.py | 2 +- .../google/maps/solar_v1/gapic_version.py | 2 +- .../solar_v1/services/solar/async_client.py | 5 +- .../maps/solar_v1/services/solar/client.py | 2 +- ...snippet_metadata_google.maps.solar.v1.json | 2 +- .../tests/unit/gapic/solar_v1/test_solar.py | 27 ++-- .../google/shopping/css/gapic_version.py | 2 +- .../google/shopping/css_v1/gapic_version.py | 2 +- .../account_labels_service/async_client.py | 6 +- .../services/account_labels_service/client.py | 2 +- .../services/accounts_service/async_client.py | 5 +- .../services/accounts_service/client.py | 2 +- .../async_client.py | 6 +- .../css_product_inputs_service/client.py | 2 +- .../css_products_service/async_client.py | 6 +- .../services/css_products_service/client.py | 2 +- ...ippet_metadata_google.shopping.css.v1.json | 2 +- packages/google-shopping-css/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../css_v1/test_account_labels_service.py | 36 ++--- .../gapic/css_v1/test_accounts_service.py | 27 ++-- .../css_v1/test_css_product_inputs_service.py | 18 +-- .../gapic/css_v1/test_css_products_service.py | 18 +-- .../merchant_accounts/gapic_version.py | 2 +- .../merchant_accounts_v1beta/gapic_version.py | 2 +- .../account_issue_service/async_client.py | 6 +- .../services/account_issue_service/client.py | 2 +- .../account_tax_service/async_client.py | 5 +- .../services/account_tax_service/client.py | 2 +- .../services/accounts_service/async_client.py | 5 +- .../services/accounts_service/client.py | 2 +- .../business_identity_service/async_client.py | 6 +- .../business_identity_service/client.py | 2 +- .../business_info_service/async_client.py | 6 +- .../services/business_info_service/client.py | 2 +- .../email_preferences_service/async_client.py | 10 +- .../email_preferences_service/client.py | 6 +- .../transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../services/homepage_service/async_client.py | 5 +- .../services/homepage_service/client.py | 2 +- .../async_client.py | 10 +- .../online_return_policy_service/client.py | 6 +- .../transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../transports/rest.py | 4 +- .../services/programs_service/async_client.py | 5 +- .../services/programs_service/client.py | 2 +- .../services/regions_service/async_client.py | 5 +- .../services/regions_service/client.py | 2 +- .../shipping_settings_service/async_client.py | 6 +- .../shipping_settings_service/client.py | 2 +- .../async_client.py | 6 +- .../client.py | 2 +- .../terms_of_service_service/async_client.py | 6 +- .../terms_of_service_service/client.py | 2 +- .../services/user_service/async_client.py | 5 +- .../services/user_service/client.py | 2 +- .../types/shippingsettings.py | 4 +- ...gle.shopping.merchant.accounts.v1beta.json | 2 +- .../test_account_issue_service.py | 9 +- .../test_account_tax_service.py | 27 ++-- .../test_accounts_service.py | 54 ++++---- .../test_business_identity_service.py | 18 +-- .../test_business_info_service.py | 18 +-- .../test_email_preferences_service.py | 18 +-- .../test_homepage_service.py | 36 ++--- .../test_online_return_policy_service.py | 18 +-- .../test_programs_service.py | 36 ++--- .../test_regions_service.py | 45 ++++--- 
.../test_shipping_settings_service.py | 18 +-- ...erms_of_service_agreement_state_service.py | 18 +-- .../test_terms_of_service_service.py | 27 ++-- .../test_user_service.py | 45 ++++--- .../merchant_conversions/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../async_client.py | 6 +- .../conversion_sources_service/client.py | 2 +- ....shopping.merchant.conversions.v1beta.json | 2 +- .../test_conversion_sources_service.py | 54 ++++---- .../merchant_datasources/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../data_sources_service/async_client.py | 6 +- .../services/data_sources_service/client.py | 2 +- ....shopping.merchant.datasources.v1beta.json | 2 +- .../test_data_sources_service.py | 54 ++++---- .../merchant_inventories/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../local_inventory_service/async_client.py | 6 +- .../local_inventory_service/client.py | 2 +- .../async_client.py | 6 +- .../regional_inventory_service/client.py | 2 +- ....shopping.merchant.inventories.v1beta.json | 2 +- .../setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../test_local_inventory_service.py | 27 ++-- .../test_regional_inventory_service.py | 27 ++-- .../shopping/merchant_lfp/gapic_version.py | 2 +- .../merchant_lfp_v1beta/gapic_version.py | 2 +- .../lfp_inventory_service/async_client.py | 6 +- .../services/lfp_inventory_service/client.py | 2 +- .../services/lfp_sale_service/async_client.py | 5 +- .../services/lfp_sale_service/client.py | 2 +- .../lfp_store_service/async_client.py | 5 +- .../services/lfp_store_service/client.py | 2 +- ...a_google.shopping.merchant.lfp.v1beta.json | 2 +- .../google-shopping-merchant-lfp/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../test_lfp_inventory_service.py | 9 +- .../test_lfp_sale_service.py | 9 +- .../test_lfp_store_service.py | 36 ++--- .../merchant_notifications/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../notifications_api_service/async_client.py | 6 +- .../notifications_api_service/client.py | 2 +- ...hopping.merchant.notifications.v1beta.json | 2 +- .../setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../test_notifications_api_service.py | 45 ++++--- .../merchant_products/gapic_version.py | 2 +- .../merchant_products_v1beta/gapic_version.py | 2 +- .../product_inputs_service/async_client.py | 6 +- .../services/product_inputs_service/client.py | 2 +- .../services/products_service/async_client.py | 5 +- .../services/products_service/client.py | 2 +- ...gle.shopping.merchant.products.v1beta.json | 2 +- .../setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../test_product_inputs_service.py | 18 +-- .../test_products_service.py | 18 +-- .../merchant_promotions/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../promotions_service/async_client.py | 5 +- .../services/promotions_service/client.py | 2 +- ...e.shopping.merchant.promotions.v1beta.json | 2 +- .../setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../test_promotions_service.py | 27 ++-- .../shopping/merchant_quota/gapic_version.py | 2 +- .../merchant_quota_v1beta/gapic_version.py | 2 +- .../services/quota_service/async_client.py | 5 +- .../services/quota_service/client.py | 2 +- ...google.shopping.merchant.quota.v1beta.json | 2 +- .../test_quota_service.py | 9 +- .../merchant_reports/gapic_version.py | 2 +- .../merchant_reports_v1beta/gapic_version.py | 2 +- .../services/report_service/async_client.py | 5 +- .../services/report_service/client.py | 2 +- .../merchant_reports_v1beta/types/reports.py | 2 - 
...ogle.shopping.merchant.reports.v1beta.json | 2 +- .../google-shopping-merchant-reports/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../test_report_service.py | 9 +- .../google/shopping/type/gapic_version.py | 2 +- .../grafeas/grafeas/grafeas/gapic_version.py | 2 +- .../grafeas/grafeas_v1/gapic_version.py | 2 +- .../services/grafeas/async_client.py | 5 +- .../grafeas_v1/services/grafeas/client.py | 2 +- .../snippet_metadata_grafeas.v1.json | 2 +- .../unit/gapic/grafeas_v1/test_grafeas.py | 126 ++++++++++-------- .../unique-grafeas-client.yaml | 8 +- 180 files changed, 760 insertions(+), 759 deletions(-) diff --git a/packages/google-maps-places/google/maps/places/gapic_version.py b/packages/google-maps-places/google/maps/places/gapic_version.py index cf4cc395ad87..558c8aab67c5 100644 --- a/packages/google-maps-places/google/maps/places/gapic_version.py +++ b/packages/google-maps-places/google/maps/places/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.17" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places_v1/gapic_version.py b/packages/google-maps-places/google/maps/places_v1/gapic_version.py index cf4cc395ad87..558c8aab67c5 100644 --- a/packages/google-maps-places/google/maps/places_v1/gapic_version.py +++ b/packages/google-maps-places/google/maps/places_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.17" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py index f0536ab4a75e..0e27906f0dc2 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PlacesClient).get_transport_class, type(PlacesClient) - ) + get_transport_class = PlacesClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/client.py b/packages/google-maps-places/google/maps/places_v1/services/places/client.py index eb73f35475dc..bbf53396b61b 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/client.py @@ -717,7 +717,7 @@ def __init__( transport_init: Union[ Type[PlacesTransport], Callable[..., PlacesTransport] ] = ( - type(self).get_transport_class(transport) + PlacesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PlacesTransport], transport) ) diff --git a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json index 7e805c13b24d..74eb224b30a6 100644 --- a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json +++ b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-places", - "version": "0.1.17" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py index 2974c544ce7f..0dbb9efbb41f 100644 --- a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py +++ b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py @@ -1221,22 +1221,23 @@ async def test_search_nearby_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_nearby - ] = mock_object + ] = mock_rpc request = {} await client.search_nearby(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_nearby(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1440,22 +1441,23 @@ async def test_search_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_text - ] = mock_object + ] = mock_rpc request = {} await client.search_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1661,22 +1663,23 @@ async def test_get_photo_media_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_photo_media - ] = mock_object + ] = mock_rpc request = {} await client.get_photo_media(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_photo_media(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2151,22 +2154,23 @@ async def test_get_place_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_place - ] = mock_object + ] = mock_rpc request = {} await client.get_place(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_place(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2604,22 +2608,23 @@ async def test_autocomplete_places_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.autocomplete_places - ] = mock_object + ] = mock_rpc request = {} await client.autocomplete_places(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.autocomplete_places(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py index d04e12ddae7c..a87f03648831 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,9 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RouteOptimizationClient).get_transport_class, type(RouteOptimizationClient) - ) + get_transport_class = RouteOptimizationClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py index fa1a87108b7a..ba2242e7e73f 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py @@ -669,7 +669,7 @@ def __init__( Type[RouteOptimizationTransport], Callable[..., RouteOptimizationTransport], ] = ( - type(self).get_transport_class(transport) + RouteOptimizationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RouteOptimizationTransport], transport) ) diff --git a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json index d30cabf7b1cf..c329d83ca2a2 100644 --- a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json +++ b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routeoptimization", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-routeoptimization/tests/unit/gapic/routeoptimization_v1/test_route_optimization.py b/packages/google-maps-routeoptimization/tests/unit/gapic/routeoptimization_v1/test_route_optimization.py index 81d834b84bcf..019f424b2345 100644 --- 
a/packages/google-maps-routeoptimization/tests/unit/gapic/routeoptimization_v1/test_route_optimization.py +++ b/packages/google-maps-routeoptimization/tests/unit/gapic/routeoptimization_v1/test_route_optimization.py @@ -1333,22 +1333,23 @@ async def test_optimize_tours_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.optimize_tours - ] = mock_object + ] = mock_rpc request = {} await client.optimize_tours(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.optimize_tours(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1570,8 +1571,9 @@ def test_batch_optimize_tours_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_optimize_tours(request) @@ -1627,26 +1629,28 @@ async def test_batch_optimize_tours_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_optimize_tours - ] = mock_object + ] = mock_rpc request = {} await client.batch_optimize_tours(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_optimize_tours(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-maps-routing/google/maps/routing/gapic_version.py b/packages/google-maps-routing/google/maps/routing/gapic_version.py index 8ebdaa033b52..558c8aab67c5 100644 --- a/packages/google-maps-routing/google/maps/routing/gapic_version.py +++ b/packages/google-maps-routing/google/maps/routing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py b/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py index 8ebdaa033b52..558c8aab67c5 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py +++ b/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py b/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py index 6ca3687219a1..a6882615fc53 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py +++ b/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -184,9 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RoutesClient).get_transport_class, type(RoutesClient) - ) + get_transport_class = RoutesClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py b/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py index db6d554f6a12..f634ae12f331 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py +++ b/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[RoutesTransport], Callable[..., RoutesTransport] ] = ( - type(self).get_transport_class(transport) + RoutesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RoutesTransport], transport) ) diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py b/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py index db2a0615cd62..57edad22a37d 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py @@ -85,7 +85,6 @@ class TollPass(proto.Enum): E-card provided by multiple banks used to pay for tolls. All e-cards via banks are charged the same so only one enum value is needed. E.g. 
- - Bank Mandiri https://www.bankmandiri.co.id/e-money - BCA https://www.bca.co.id/flazz diff --git a/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json b/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json index d129095b793d..b5ed5aca319c 100644 --- a/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json +++ b/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routing", - "version": "0.6.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py b/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py index 59ce071bdb43..8f1804aabaf4 100644 --- a/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py +++ b/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py @@ -1231,22 +1231,23 @@ async def test_compute_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_routes - ] = mock_object + ] = mock_rpc request = {} await client.compute_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1460,22 +1461,23 @@ async def test_compute_route_matrix_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_route_matrix - ] = mock_object + ] = mock_rpc request = {} await client.compute_route_matrix(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_route_matrix(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-maps-solar/google/maps/solar/gapic_version.py b/packages/google-maps-solar/google/maps/solar/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-maps-solar/google/maps/solar/gapic_version.py +++ b/packages/google-maps-solar/google/maps/solar/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-solar/google/maps/solar_v1/gapic_version.py b/packages/google-maps-solar/google/maps/solar_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-maps-solar/google/maps/solar_v1/gapic_version.py +++ b/packages/google-maps-solar/google/maps/solar_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-solar/google/maps/solar_v1/services/solar/async_client.py b/packages/google-maps-solar/google/maps/solar_v1/services/solar/async_client.py index a3cbc6981055..73899add5518 100644 --- a/packages/google-maps-solar/google/maps/solar_v1/services/solar/async_client.py +++ b/packages/google-maps-solar/google/maps/solar_v1/services/solar/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -179,9 +178,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SolarClient).get_transport_class, type(SolarClient) - ) + get_transport_class = SolarClient.get_transport_class def __init__( self, diff --git a/packages/google-maps-solar/google/maps/solar_v1/services/solar/client.py b/packages/google-maps-solar/google/maps/solar_v1/services/solar/client.py index 8a738780b101..8c15e80690ee 100644 --- a/packages/google-maps-solar/google/maps/solar_v1/services/solar/client.py +++ b/packages/google-maps-solar/google/maps/solar_v1/services/solar/client.py @@ -637,7 +637,7 @@ def __init__( transport_init: Union[ Type[SolarTransport], Callable[..., SolarTransport] ] = ( - type(self).get_transport_class(transport) + SolarClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SolarTransport], transport) ) diff --git a/packages/google-maps-solar/samples/generated_samples/snippet_metadata_google.maps.solar.v1.json b/packages/google-maps-solar/samples/generated_samples/snippet_metadata_google.maps.solar.v1.json index 2974f8282f83..ef51a4e54a98 100644 --- a/packages/google-maps-solar/samples/generated_samples/snippet_metadata_google.maps.solar.v1.json +++ b/packages/google-maps-solar/samples/generated_samples/snippet_metadata_google.maps.solar.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-solar", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-maps-solar/tests/unit/gapic/solar_v1/test_solar.py b/packages/google-maps-solar/tests/unit/gapic/solar_v1/test_solar.py index bb7a78ea0d72..9306930f7bd8 100644 --- a/packages/google-maps-solar/tests/unit/gapic/solar_v1/test_solar.py +++ b/packages/google-maps-solar/tests/unit/gapic/solar_v1/test_solar.py @@ -1236,22 +1236,23 @@ async def test_find_closest_building_insights_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.find_closest_building_insights - ] = mock_object + ] = mock_rpc request = {} await client.find_closest_building_insights(request) # Establish that the underlying gRPC stub method was 
called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.find_closest_building_insights(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1484,22 +1485,23 @@ async def test_get_data_layers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_layers - ] = mock_object + ] = mock_rpc request = {} await client.get_data_layers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_layers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1720,22 +1722,23 @@ async def test_get_geo_tiff_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_geo_tiff - ] = mock_object + ] = mock_rpc request = {} await client.get_geo_tiff(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_geo_tiff(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-css/google/shopping/css/gapic_version.py b/packages/google-shopping-css/google/shopping/css/gapic_version.py index cf5493b86bbc..558c8aab67c5 100644 --- a/packages/google-shopping-css/google/shopping/css/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py index cf5493b86bbc..558c8aab67c5 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/async_client.py b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/async_client.py index df9ef4a6ffe3..66e098bc2490 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/async_client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,10 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AccountLabelsServiceClient).get_transport_class, - type(AccountLabelsServiceClient), - ) + get_transport_class = AccountLabelsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/client.py b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/client.py index 6de4e9b0f0fc..1c34fdb97744 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/client.py @@ -660,7 +660,7 @@ def __init__( Type[AccountLabelsServiceTransport], Callable[..., AccountLabelsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AccountLabelsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AccountLabelsServiceTransport], transport) ) diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/async_client.py b/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/async_client.py index 7559129d87d8..0cb6cae3efce 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/async_client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AccountsServiceClient).get_transport_class, type(AccountsServiceClient) - ) + get_transport_class = AccountsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/client.py b/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/client.py index 878b77d1ddbb..f27c35a320e1 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/accounts_service/client.py @@ -655,7 +655,7 @@ def __init__( transport_init: Union[ Type[AccountsServiceTransport], Callable[..., AccountsServiceTransport] ] = ( - type(self).get_transport_class(transport) + AccountsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AccountsServiceTransport], transport) ) diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/async_client.py b/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/async_client.py index c6c030380600..8c7085c07388 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/async_client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CssProductInputsServiceClient).get_transport_class, - type(CssProductInputsServiceClient), - ) + get_transport_class = CssProductInputsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/client.py b/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/client.py index 359354e9dec1..096667a1876c 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/css_product_inputs_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[CssProductInputsServiceTransport], Callable[..., CssProductInputsServiceTransport], ] = ( - type(self).get_transport_class(transport) + CssProductInputsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CssProductInputsServiceTransport], transport) ) diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/async_client.py b/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/async_client.py index 875771e984d8..59fa5016dcc9 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/async_client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CssProductsServiceClient).get_transport_class, - type(CssProductsServiceClient), - ) + get_transport_class = CssProductsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/client.py b/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/client.py index b109e141e6f6..230f2b014b3c 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/client.py +++ b/packages/google-shopping-css/google/shopping/css_v1/services/css_products_service/client.py @@ -666,7 +666,7 @@ def __init__( Type[CssProductsServiceTransport], Callable[..., CssProductsServiceTransport], ] = ( - type(self).get_transport_class(transport) + CssProductsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CssProductsServiceTransport], transport) ) diff --git a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json index 772294b0a8fd..5b682921bee5 100644 --- a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json +++ b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-css", - "version": "0.1.7" + "version": "0.1.0" }, "snippets": [ { 
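
These hunks repeat the two changes this gapic-generator-python bump applies across the packages in this patch. First, the async clients drop the `functools.partial` wrapper around `get_transport_class`: that method is defined on the generated client's metaclass (which is why `type(AccountsServiceClient).get_transport_class` resolved at all), so a plain attribute lookup on the client class already yields a bound callable and the extra indirection appears to buy nothing. A minimal sketch of the equivalence, using made-up names (`_ClientMeta`, `FakeClient`, the fake transport strings) rather than the real generated classes:

# Illustrative only: _ClientMeta/FakeClient mirror the shape of the
# generated *ClientMeta / *Client pair, not the actual GAPIC code.
import functools
from collections import OrderedDict


class _ClientMeta(type):
    _transport_registry = OrderedDict(
        grpc="FakeGrpcTransport", rest="FakeRestTransport"
    )

    def get_transport_class(cls, label=None):
        # Return the transport registered under `label`, or the first one.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))


class FakeClient(metaclass=_ClientMeta):
    pass


# Old generated form: look the function up on the metaclass and
# partially apply the metaclass itself as `cls`.
old_style = functools.partial(
    type(FakeClient).get_transport_class, type(FakeClient)
)

# New generated form: attribute access on the class already binds the
# metaclass-level method, so no functools.partial is needed.
new_style = FakeClient.get_transport_class

assert old_style("rest") == new_style("rest") == "FakeRestTransport"
assert old_style() == new_style() == "FakeGrpcTransport"

Second, the companion test hunks are mechanical by comparison: `mock_object` is renamed to `mock_rpc`, and the `AsyncMock` standing in for the wrapped RPC gets an explicit `mock.Mock()` return value, so the awaited result of each stubbed call is a plain `Mock` rather than whatever child mock the `AsyncMock` would otherwise auto-create.
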
diff --git a/packages/google-shopping-css/setup.py b/packages/google-shopping-css/setup.py index 0b5b86f61cb6..86cceb75fb83 100644 --- a/packages/google-shopping-css/setup.py +++ b/packages/google-shopping-css/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-shopping-type >= 0.1.0, <1.0.0dev", + "google-shopping-type >= 0.1.6, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css" diff --git a/packages/google-shopping-css/testing/constraints-3.7.txt b/packages/google-shopping-css/testing/constraints-3.7.txt index 83bec2765bf7..130a0c0f80ab 100644 --- a/packages/google-shopping-css/testing/constraints-3.7.txt +++ b/packages/google-shopping-css/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -google-shopping-type==0.1.0 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py index 09f050b8a17a..8e827d965701 100644 --- a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py +++ b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_account_labels_service.py @@ -1363,22 +1363,23 @@ async def test_list_account_labels_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_account_labels - ] = mock_object + ] = mock_rpc request = {} await client.list_account_labels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_account_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1967,22 +1968,23 @@ async def test_create_account_label_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_account_label - ] = mock_object + ] = mock_rpc request = {} await client.create_account_label(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_account_label(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2385,22 +2387,23 @@ async def test_update_account_label_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_account_label - ] = mock_object + ] = mock_rpc request = {} await client.update_account_label(request) # Establish that the underlying gRPC stub method was called.
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_account_label(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2775,22 +2778,23 @@ async def test_delete_account_label_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_account_label - ] = mock_object + ] = mock_rpc request = {} await client.delete_account_label(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_account_label(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py index 02dfb8ccf6e1..b5e69b9df20c 100644 --- a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py +++ b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_accounts_service.py @@ -1321,22 +1321,23 @@ async def test_list_child_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_child_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_child_accounts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_child_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1920,22 +1921,23 @@ async def test_get_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_account - ] = mock_object + ] = mock_rpc request = {} await client.get_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2317,22 +2319,23 @@ async def test_update_labels_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_labels - ] = mock_object + ] = mock_rpc request = {} await client.update_labels(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_product_inputs_service.py b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_product_inputs_service.py index 98cc2589a2be..44cee9034e03 100644 --- a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_product_inputs_service.py +++ b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_product_inputs_service.py @@ -1379,22 +1379,23 @@ async def test_insert_css_product_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_css_product_input - ] = mock_object + ] = mock_rpc request = {} await client.insert_css_product_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_css_product_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1682,22 +1683,23 @@ async def test_delete_css_product_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_css_product_input - ] = mock_object + ] = mock_rpc request = {} await client.delete_css_product_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_css_product_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py index 424c55241ea6..093bbfb35ea4 100644 --- a/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py +++ b/packages/google-shopping-css/tests/unit/gapic/css_v1/test_css_products_service.py @@ -1333,22 +1333,23 @@ async def test_get_css_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_css_product - ] = mock_object + ] = mock_rpc request = {} await client.get_css_product(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_css_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1715,22 +1716,23 @@ async def test_list_css_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_css_products - ] = mock_object + ] = mock_rpc request = {} await client.list_css_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_css_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/gapic_version.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/gapic_version.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_version.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/async_client.py index 0939aff44d3a..1cbc438a3fa5 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,10 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AccountIssueServiceClient).get_transport_class, - type(AccountIssueServiceClient), - ) + get_transport_class = AccountIssueServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/client.py index 78dd29de2855..4a4615de60e5 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_issue_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[AccountIssueServiceTransport], Callable[..., AccountIssueServiceTransport], ] = ( - type(self).get_transport_class(transport) + AccountIssueServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AccountIssueServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/async_client.py index 890dc5a6b7bb..136885e029cb 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AccountTaxServiceClient).get_transport_class, type(AccountTaxServiceClient) - ) + get_transport_class = AccountTaxServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/client.py index 2edc548b9501..b8f5ad2116ad 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/account_tax_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[AccountTaxServiceTransport], Callable[..., AccountTaxServiceTransport], ] = ( - type(self).get_transport_class(transport) + AccountTaxServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AccountTaxServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py index 139c4409fcec..2b9a05d02cc8 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AccountsServiceClient).get_transport_class, type(AccountsServiceClient) - ) + get_transport_class = AccountsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py index 6ad5c1d20479..b5bb3423a3df 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py @@ -690,7 +690,7 @@ def __init__( transport_init: Union[ Type[AccountsServiceTransport], Callable[..., AccountsServiceTransport] ] = ( - type(self).get_transport_class(transport) + AccountsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AccountsServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/async_client.py index 686902735e1a..0961ffbb5ccc 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BusinessIdentityServiceClient).get_transport_class, - type(BusinessIdentityServiceClient), - ) + get_transport_class = BusinessIdentityServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/client.py index ec0111bfaf1e..3af6dc5b2618 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_identity_service/client.py @@ -666,7 +666,7 @@ def __init__( Type[BusinessIdentityServiceTransport], Callable[..., BusinessIdentityServiceTransport], ] = ( - type(self).get_transport_class(transport) + BusinessIdentityServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BusinessIdentityServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/async_client.py index 1616d19d661e..470d273018fa 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,10 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BusinessInfoServiceClient).get_transport_class, - type(BusinessInfoServiceClient), - ) + get_transport_class = BusinessInfoServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/client.py index e298316f4707..2c610030a784 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/business_info_service/client.py @@ -665,7 +665,7 @@ def __init__( Type[BusinessInfoServiceTransport], Callable[..., BusinessInfoServiceTransport], ] = ( - type(self).get_transport_class(transport) + BusinessInfoServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BusinessInfoServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py index 03ef568cbddb..e01a3b2bcc9a 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,10 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EmailPreferencesServiceClient).get_transport_class, - type(EmailPreferencesServiceClient), - ) + get_transport_class = EmailPreferencesServiceClient.get_transport_class def __init__( self, @@ -291,7 +287,7 @@ async def get_email_preferences( r"""Returns the email preferences for a Merchant Center account user. - Use the `name=accounts/*/users/me/emailPreferences` alias to get + Use the name=accounts/*/users/me/emailPreferences alias to get preferences for the authenticated user. .. code-block:: python @@ -415,7 +411,7 @@ async def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the `name=accounts/*/users/me/emailPreferences` alias to + Use the name=accounts/*/users/me/emailPreferences alias to update preferences for the authenticated user. .. 
code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py index 5cfba0208653..e2d8285b4900 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py @@ -671,7 +671,7 @@ def __init__( Type[EmailPreferencesServiceTransport], Callable[..., EmailPreferencesServiceTransport], ] = ( - type(self).get_transport_class(transport) + EmailPreferencesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EmailPreferencesServiceTransport], transport) ) @@ -702,7 +702,7 @@ def get_email_preferences( r"""Returns the email preferences for a Merchant Center account user. - Use the `name=accounts/*/users/me/emailPreferences` alias to get + Use the name=accounts/*/users/me/emailPreferences alias to get preferences for the authenticated user. .. code-block:: python @@ -823,7 +823,7 @@ def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the `name=accounts/*/users/me/emailPreferences` alias to + Use the name=accounts/*/users/me/emailPreferences alias to update preferences for the authenticated user. .. code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc.py index 79b98addeaa5..568ca9dd5a6e 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc.py @@ -248,7 +248,7 @@ def get_email_preferences( Returns the email preferences for a Merchant Center account user. - Use the `name=accounts/*/users/me/emailPreferences` alias to get + Use the name=accounts/*/users/me/emailPreferences alias to get preferences for the authenticated user. Returns: @@ -288,7 +288,7 @@ def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the `name=accounts/*/users/me/emailPreferences` alias to + Use the name=accounts/*/users/me/emailPreferences alias to update preferences for the authenticated user. 
Returns: diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc_asyncio.py index 5d27647ec04b..afd5cc75f670 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc_asyncio.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/transports/grpc_asyncio.py @@ -253,7 +253,7 @@ def get_email_preferences( Returns the email preferences for a Merchant Center account user. - Use the `name=accounts/*/users/me/emailPreferences` alias to get + Use the name=accounts/*/users/me/emailPreferences alias to get preferences for the authenticated user. Returns: @@ -293,7 +293,7 @@ def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the `name=accounts/*/users/me/emailPreferences` alias to + Use the name=accounts/*/users/me/emailPreferences alias to update preferences for the authenticated user. Returns: diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/async_client.py index 433149cf930f..54e31ed88a03 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(HomepageServiceClient).get_transport_class, type(HomepageServiceClient) - ) + get_transport_class = HomepageServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/client.py index 4af54a563701..54c748bdc7b9 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/homepage_service/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[HomepageServiceTransport], Callable[..., HomepageServiceTransport] ] = ( - type(self).get_transport_class(transport) + HomepageServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., HomepageServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py index 97092d22e973..14f3f87a6b36 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -59,7 +58,9 @@ class OnlineReturnPolicyServiceAsyncClient: ads and free listings programs. This API defines the following resource model: - - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + -------------------------------------------------------- + + [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] """ _client: OnlineReturnPolicyServiceClient @@ -207,10 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(OnlineReturnPolicyServiceClient).get_transport_class, - type(OnlineReturnPolicyServiceClient), - ) + get_transport_class = OnlineReturnPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py index 20ceda8e8083..5159022c0212 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py @@ -102,7 +102,9 @@ class OnlineReturnPolicyServiceClient(metaclass=OnlineReturnPolicyServiceClientM ads and free listings programs. 
This API defines the following resource model: - - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + -------------------------------------------------------- + + [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] """ @staticmethod @@ -675,7 +677,7 @@ def __init__( Type[OnlineReturnPolicyServiceTransport], Callable[..., OnlineReturnPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + OnlineReturnPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OnlineReturnPolicyServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc.py index a8d881201cc9..dea6fb91c35a 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc.py @@ -35,7 +35,9 @@ class OnlineReturnPolicyServiceGrpcTransport(OnlineReturnPolicyServiceTransport) ads and free listings programs. This API defines the following resource model: - - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + -------------------------------------------------------- + + [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc_asyncio.py index 9002283d8caf..043a6612628a 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc_asyncio.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/grpc_asyncio.py @@ -38,7 +38,9 @@ class OnlineReturnPolicyServiceGrpcAsyncIOTransport(OnlineReturnPolicyServiceTra ads and free listings programs. 
This API defines the following resource model: - - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + -------------------------------------------------------- + + [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/rest.py index 220765f2ddd8..662b5c93e797 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/rest.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/transports/rest.py @@ -151,7 +151,9 @@ class OnlineReturnPolicyServiceRestTransport(OnlineReturnPolicyServiceTransport) ads and free listings programs. This API defines the following resource model: - - [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] + -------------------------------------------------------- + + [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/async_client.py index 76a5c73f80d7..91f454d23bcc 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProgramsServiceClient).get_transport_class, type(ProgramsServiceClient) - ) + get_transport_class = ProgramsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/client.py index a7333db1b864..acc45a0dc1ab 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/programs_service/client.py @@ -668,7 +668,7 @@ def __init__( transport_init: Union[ Type[ProgramsServiceTransport], Callable[..., ProgramsServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProgramsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProgramsServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/async_client.py index bac7fafcf75e..2ab9b5c1a99b 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RegionsServiceClient).get_transport_class, type(RegionsServiceClient) - ) + get_transport_class = RegionsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/client.py index 5c08eaeaadf5..a5820b8ae5e5 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/regions_service/client.py @@ -678,7 +678,7 @@ def __init__( transport_init: Union[ Type[RegionsServiceTransport], Callable[..., RegionsServiceTransport] ] = ( - type(self).get_transport_class(transport) + RegionsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionsServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py index a12e387affcd..aab6ce594de6 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,10 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ShippingSettingsServiceClient).get_transport_class, - type(ShippingSettingsServiceClient), - ) + get_transport_class = ShippingSettingsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py index 1252e98263f2..a54e3edc102b 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py @@ -663,7 +663,7 @@ def __init__( Type[ShippingSettingsServiceTransport], Callable[..., ShippingSettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ShippingSettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ShippingSettingsServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py index a1383137817a..c32d92d51405 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,10 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TermsOfServiceAgreementStateServiceClient).get_transport_class, - type(TermsOfServiceAgreementStateServiceClient), - ) + get_transport_class = TermsOfServiceAgreementStateServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py index 8be4e9679afe..98cffc24a88e 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py @@ -716,7 +716,7 @@ def __init__( Type[TermsOfServiceAgreementStateServiceTransport], Callable[..., TermsOfServiceAgreementStateServiceTransport], ] = ( - type(self).get_transport_class(transport) + TermsOfServiceAgreementStateServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., TermsOfServiceAgreementStateServiceTransport], diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py index 723eb05fceaf..841d23664e98 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,10 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TermsOfServiceServiceClient).get_transport_class, - type(TermsOfServiceServiceClient), - ) + get_transport_class = TermsOfServiceServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py index 6c1b4c48e2ed..8b47907a1926 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[TermsOfServiceServiceTransport], Callable[..., TermsOfServiceServiceTransport], ] = ( - type(self).get_transport_class(transport) + TermsOfServiceServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TermsOfServiceServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/async_client.py index c7d4f5d32951..a984dec524c5 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(UserServiceClient).get_transport_class, type(UserServiceClient) - ) + get_transport_class = UserServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/client.py index f7dc5336a3e9..564e07f7a1ed 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/user_service/client.py @@ -669,7 +669,7 @@ def __init__( transport_init: Union[ Type[UserServiceTransport], Callable[..., UserServiceTransport] ] = ( - type(self).get_transport_class(transport) + UserServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py index 93ccf59230a9..2afe56a87508 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py @@ -65,8 +65,8 @@ class ShippingSettings(proto.Message): etag (str): Required. This field is used for avoid async issue. Make sure shipping setting data - didn't change between get call and insert call. - The user should do following steps: + didn't change between get call and insert call. + The user should do following steps: 1. Set etag field as empty string for initial shipping setting creation. 
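The get_transport_class hunks throughout this patch replace a functools.partial built over the client's metaclass with a direct reference to the same attribute exposed through the client class. A minimal, self-contained sketch of why the two spellings resolve to the same lookup (DemoClient and _DemoClientMeta are hypothetical stand-ins for the generated classes, not code from this patch):

.. code-block:: python

    import functools
    from collections import OrderedDict


    class _DemoClientMeta(type):
        """Hypothetical stand-in for a generated *ClientMeta metaclass."""

        _transport_registry = OrderedDict(grpc="grpc-transport", rest="rest-transport")

        def get_transport_class(cls, label=None):
            # Return a registered transport by label, falling back to the
            # first registered entry when no label is given.
            if label:
                return cls._transport_registry[label]
            return next(iter(cls._transport_registry.values()))


    class DemoClient(metaclass=_DemoClientMeta):
        """Hypothetical stand-in for a generated *Client class."""


    # Pre-change spelling: a partial over the metaclass, passing the metaclass as cls.
    old_form = functools.partial(
        type(DemoClient).get_transport_class, type(DemoClient)
    )

    # Post-change spelling: the same method, bound through the client class itself.
    new_form = DemoClient.get_transport_class

    # Both spellings reach the same registry and return the same transport.
    assert old_form("rest") == new_form("rest") == "rest-transport"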
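The test hunks in this patch rename the cached-wrapper mock from mock_object to mock_rpc and pin its return_value to a plain Mock, so awaiting the stubbed RPC yields a synchronous result while call_count still tracks each invocation. A minimal sketch of that pattern in isolation (wrapped_methods and the method key are hypothetical placeholders, not the real transport cache):

.. code-block:: python

    import asyncio
    from unittest import mock


    async def main():
        # Stand-in for a cached wrapped RPC on a transport.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()  # awaiting the call returns a plain Mock

        # Hypothetical cache keyed by method, mirroring _wrapped_methods in the tests.
        wrapped_methods = {"list_css_products": mock_rpc}

        # Repeated calls go through the cached entry; no new wrapper is created.
        await wrapped_methods["list_css_products"](request={})
        assert mock_rpc.call_count == 1

        await wrapped_methods["list_css_products"](request={})
        assert mock_rpc.call_count == 2


    asyncio.run(main())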
diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json index 8b9696904fa4..a6cc2aef08d1 100644 --- a/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-accounts", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py index 5b590392e8a0..9cfff1670b0a 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py @@ -1366,22 +1366,23 @@ async def test_list_account_issues_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_account_issues - ] = mock_object + ] = mock_rpc request = {} await client.list_account_issues(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_account_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_tax_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_tax_service.py index d807c6b7ab68..1c24be2ac0a2 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_tax_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_tax_service.py @@ -1329,22 +1329,23 @@ async def test_get_account_tax_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_account_tax - ] = mock_object + ] = mock_rpc request = {} await client.get_account_tax(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_account_tax(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1699,22 +1700,23 @@ async def test_list_account_tax_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_account_tax - ] = mock_object + ] = mock_rpc request = {} await client.list_account_tax(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_account_tax(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2268,22 +2270,23 @@ async def test_update_account_tax_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_account_tax - ] = mock_object + ] = mock_rpc request = {} await client.update_account_tax(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_account_tax(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py index 6b1b1dbdd35c..c5e99b1fa6eb 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py @@ -1323,22 +1323,23 @@ async def test_get_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_account - ] = mock_object + ] = mock_rpc request = {} await client.get_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1717,22 +1718,23 @@ async def test_create_and_configure_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_and_configure_account - ] = mock_object + ] = mock_rpc request = {} await client.create_and_configure_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_and_configure_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1944,22 +1946,23 @@ async def test_delete_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_account - ] = mock_object + ] = mock_rpc request = {} await client.delete_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2310,22 +2313,23 @@ async def test_update_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_account - ] = mock_object + ] = mock_rpc request = {} await client.update_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2692,22 +2696,23 @@ async def test_list_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_accounts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3116,22 +3121,23 @@ async def test_list_sub_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sub_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_sub_accounts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sub_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_identity_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_identity_service.py index 687615bc6f33..f4566fff4b42 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_identity_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_identity_service.py @@ -1372,22 +1372,23 @@ async def test_get_business_identity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_business_identity - ] = mock_object + ] = mock_rpc request = {} await client.get_business_identity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_business_identity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1767,22 +1768,23 @@ async def test_update_business_identity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_business_identity - ] = mock_object + ] = mock_rpc request = {} await client.update_business_identity(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_business_identity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py index 05b94b68298b..15afcf6bc479 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py @@ -1371,22 +1371,23 @@ async def test_get_business_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_business_info - ] = mock_object + ] = mock_rpc request = {} await client.get_business_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_business_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1764,22 +1765,23 @@ async def test_update_business_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_business_info - ] = mock_object + ] = mock_rpc request = {} await client.update_business_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_business_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_email_preferences_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_email_preferences_service.py index 9e84ba633030..6ae10d659323 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_email_preferences_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_email_preferences_service.py @@ -1371,22 +1371,23 @@ async def test_get_email_preferences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_email_preferences - ] = mock_object + ] = mock_rpc request = {} await client.get_email_preferences(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_email_preferences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1764,22 +1765,23 @@ async def test_update_email_preferences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_email_preferences - ] = mock_object + ] = mock_rpc request = {} await client.update_email_preferences(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_email_preferences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_homepage_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_homepage_service.py index 53a6be724ce3..54fdd2b90714 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_homepage_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_homepage_service.py @@ -1312,22 +1312,23 @@ async def test_get_homepage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_homepage - ] = mock_object + ] = mock_rpc request = {} await client.get_homepage(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_homepage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1678,22 +1679,23 @@ async def test_update_homepage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_homepage - ] = mock_object + ] = mock_rpc request = {} await client.update_homepage(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_homepage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2062,22 +2064,23 @@ async def test_claim_homepage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.claim_homepage - ] = mock_object + ] = mock_rpc request = {} await client.claim_homepage(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.claim_homepage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2354,22 +2357,23 @@ async def test_unclaim_homepage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unclaim_homepage - ] = mock_object + ] = mock_rpc request = {} await client.unclaim_homepage(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.unclaim_homepage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_online_return_policy_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_online_return_policy_service.py index f2e85e7dd396..fc5a00a8058e 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_online_return_policy_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_online_return_policy_service.py @@ -1415,22 +1415,23 @@ async def test_get_online_return_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_online_return_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_online_return_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_online_return_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1831,22 +1832,23 @@ async def test_list_online_return_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_online_return_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_online_return_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_online_return_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_programs_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_programs_service.py index 30d64aa1da16..f3d21efba569 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_programs_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_programs_service.py @@ -1314,22 +1314,23 @@ async def test_get_program_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_program - ] = mock_object + ] = mock_rpc request = {} await client.get_program(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_program(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1682,22 +1683,23 @@ async def test_list_programs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_programs - ] = mock_object + ] = mock_rpc request = {} await client.list_programs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_programs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2249,22 +2251,23 @@ async def test_enable_program_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_program - ] = mock_object + ] = mock_rpc request = {} await client.enable_program(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enable_program(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2624,22 +2627,23 @@ async def test_disable_program_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_program - ] = mock_object + ] = mock_rpc request = {} await client.disable_program(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.disable_program(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_regions_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_regions_service.py index 30c6f0e9fe2e..4a4aa5aeb46a 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_regions_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_regions_service.py @@ -1284,22 +1284,23 @@ async def test_get_region_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_region - ] = mock_object + ] = mock_rpc request = {} await client.get_region(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_region(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1651,22 +1652,23 @@ async def test_create_region_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_region - ] = mock_object + ] = mock_rpc request = {} await client.create_region(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_region(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2032,22 +2034,23 @@ async def test_update_region_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_region - ] = mock_object + ] = mock_rpc request = {} await client.update_region(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_region(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2397,22 +2400,23 @@ async def test_delete_region_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_region - ] = mock_object + ] = mock_rpc request = {} await client.delete_region(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_region(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2754,22 +2758,23 @@ async def test_list_regions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_regions - ] = mock_object + ] = mock_rpc request = {} await client.list_regions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_regions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_shipping_settings_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_shipping_settings_service.py index 7230ad7f5104..e5c3a1557a94 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_shipping_settings_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_shipping_settings_service.py @@ -1369,22 +1369,23 @@ async def test_get_shipping_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_shipping_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_shipping_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_shipping_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1762,22 +1763,23 @@ async def test_insert_shipping_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_shipping_settings - ] = mock_object + ] = mock_rpc request = {} await client.insert_shipping_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_shipping_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_agreement_state_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_agreement_state_service.py index 3ba5de93574a..eeed3bd18c59 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_agreement_state_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_agreement_state_service.py @@ -1426,22 +1426,23 @@ async def test_get_terms_of_service_agreement_state_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_terms_of_service_agreement_state - ] = mock_object + ] = mock_rpc request = {} await client.get_terms_of_service_agreement_state(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_terms_of_service_agreement_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1864,22 +1865,23 @@ async def test_retrieve_for_application_terms_of_service_agreement_state_async_u ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_for_application_terms_of_service_agreement_state - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_for_application_terms_of_service_agreement_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_for_application_terms_of_service_agreement_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py index 9a887afdf4ee..e50f9373a6c8 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py @@ -1379,22 +1379,23 @@ async def test_get_terms_of_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_terms_of_service - ] = mock_object + ] = mock_rpc request = {} await client.get_terms_of_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_terms_of_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1787,22 +1788,23 @@ async def test_retrieve_latest_terms_of_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retrieve_latest_terms_of_service - ] = mock_object + ] = mock_rpc request = {} await client.retrieve_latest_terms_of_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.retrieve_latest_terms_of_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2029,22 +2031,23 @@ async def test_accept_terms_of_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.accept_terms_of_service - ] = mock_object + ] = mock_rpc request = {} await client.accept_terms_of_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.accept_terms_of_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_user_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_user_service.py index 0d7995b13ac0..cf9e307285c0 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_user_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_user_service.py @@ -1251,22 +1251,23 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user - ] = mock_object + ] = mock_rpc request = {} await client.get_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1623,22 +1624,23 @@ async def test_create_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user - ] = mock_object + ] = mock_rpc request = {} await client.create_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1990,22 +1992,23 @@ async def test_delete_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user - ] = mock_object + ] = mock_rpc request = {} await client.delete_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2347,22 +2350,23 @@ async def test_update_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user - ] = mock_object + ] = mock_rpc request = {} await client.update_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2721,22 +2725,23 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_users - ] = mock_object + ] = mock_rpc request = {} await client.list_users(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions/gapic_version.py b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions/gapic_version.py +++ b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/gapic_version.py b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/async_client.py b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/async_client.py index 4611e4ddc837..d3c1bcfc9094 100644 --- a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/async_client.py +++ b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversionSourcesServiceClient).get_transport_class, - type(ConversionSourcesServiceClient), - ) + get_transport_class = ConversionSourcesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/client.py b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/client.py index c0f6fd8b5c50..533e9f233698 100644 --- a/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/client.py +++ b/packages/google-shopping-merchant-conversions/google/shopping/merchant_conversions_v1beta/services/conversion_sources_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[ConversionSourcesServiceTransport], Callable[..., ConversionSourcesServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConversionSourcesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversionSourcesServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-conversions/samples/generated_samples/snippet_metadata_google.shopping.merchant.conversions.v1beta.json b/packages/google-shopping-merchant-conversions/samples/generated_samples/snippet_metadata_google.shopping.merchant.conversions.v1beta.json index 03dbba76cda4..03a202362a18 100644 --- a/packages/google-shopping-merchant-conversions/samples/generated_samples/snippet_metadata_google.shopping.merchant.conversions.v1beta.json +++ b/packages/google-shopping-merchant-conversions/samples/generated_samples/snippet_metadata_google.shopping.merchant.conversions.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-conversions", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py b/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py index c2cf3f5fbf5b..8165aed2dcc2 100644 --- a/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py +++ b/packages/google-shopping-merchant-conversions/tests/unit/gapic/merchant_conversions_v1beta/test_conversion_sources_service.py @@ -1380,22 
+1380,23 @@ async def test_create_conversion_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversion_source - ] = mock_object + ] = mock_rpc request = {} await client.create_conversion_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversion_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1808,22 +1809,23 @@ async def test_update_conversion_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversion_source - ] = mock_object + ] = mock_rpc request = {} await client.update_conversion_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversion_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2227,22 +2229,23 @@ async def test_delete_conversion_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversion_source - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversion_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversion_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2612,22 +2615,23 @@ async def test_undelete_conversion_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_conversion_source - ] = mock_object + ] = mock_rpc request = {} await client.undelete_conversion_source(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.undelete_conversion_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2924,22 +2928,23 @@ async def test_get_conversion_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversion_source - ] = mock_object + ] = mock_rpc request = {} await client.get_conversion_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversion_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3318,22 +3323,23 @@ async def test_list_conversion_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversion_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_conversion_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversion_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/gapic_version.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/gapic_version.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_version.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/async_client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/async_client.py index ec266d5aeb96..89103d087586 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/async_client.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataSourcesServiceClient).get_transport_class, - type(DataSourcesServiceClient), - ) + get_transport_class = DataSourcesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/client.py index ea4dfc6fc59e..2870cb3ee221 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/client.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/data_sources_service/client.py @@ -674,7 +674,7 @@ def __init__( Type[DataSourcesServiceTransport], Callable[..., DataSourcesServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataSourcesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataSourcesServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json index 771fe1499dd3..ee381d03839d 100644 --- a/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-datasources", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py index d941379d2e80..9bd09642ee77 100644 --- a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py @@ -1339,22 +1339,23 @@ async def test_get_data_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with 
mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_source - ] = mock_object + ] = mock_rpc request = {} await client.get_data_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1721,22 +1722,23 @@ async def test_list_data_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2318,22 +2320,23 @@ async def test_create_data_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_source - ] = mock_object + ] = mock_rpc request = {} await client.create_data_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2749,22 +2752,23 @@ async def test_update_data_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_source - ] = mock_object + ] = mock_rpc request = {} await client.update_data_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3168,22 +3172,23 @@ async def test_delete_data_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_source - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_source(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3536,22 +3541,23 @@ async def test_fetch_data_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_data_source - ] = mock_object + ] = mock_rpc request = {} await client.fetch_data_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/gapic_version.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/gapic_version.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/gapic_version.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py index 4240ede43db6..5aee1badcfa4 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,10 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LocalInventoryServiceClient).get_transport_class, - type(LocalInventoryServiceClient), - ) + get_transport_class = LocalInventoryServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py index 1862f5b1f0d3..c0cb9c434748 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/local_inventory_service/client.py @@ -672,7 +672,7 @@ def __init__( Type[LocalInventoryServiceTransport], Callable[..., LocalInventoryServiceTransport], ] = ( - type(self).get_transport_class(transport) + LocalInventoryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LocalInventoryServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py index 387b3d7e9098..bd282b0768bb 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RegionalInventoryServiceClient).get_transport_class, - type(RegionalInventoryServiceClient), - ) + get_transport_class = RegionalInventoryServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py index eef2e9ce9635..78f77bc4778c 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1beta/services/regional_inventory_service/client.py @@ -678,7 +678,7 @@ def __init__( Type[RegionalInventoryServiceTransport], Callable[..., RegionalInventoryServiceTransport], ] = ( - type(self).get_transport_class(transport) + RegionalInventoryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionalInventoryServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-inventories/samples/generated_samples/snippet_metadata_google.shopping.merchant.inventories.v1beta.json b/packages/google-shopping-merchant-inventories/samples/generated_samples/snippet_metadata_google.shopping.merchant.inventories.v1beta.json index 1faa42e79902..69c4a31abe8f 100644 --- a/packages/google-shopping-merchant-inventories/samples/generated_samples/snippet_metadata_google.shopping.merchant.inventories.v1beta.json +++ b/packages/google-shopping-merchant-inventories/samples/generated_samples/snippet_metadata_google.shopping.merchant.inventories.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-inventories", - "version": "0.1.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-inventories/setup.py b/packages/google-shopping-merchant-inventories/setup.py index 567549a13266..c01e43dcf9c4 100644 --- a/packages/google-shopping-merchant-inventories/setup.py +++ b/packages/google-shopping-merchant-inventories/setup.py @@ -47,7 +47,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-shopping-type >= 0.1.0, <1.0.0dev", + "google-shopping-type >= 0.1.6, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories" diff --git a/packages/google-shopping-merchant-inventories/testing/constraints-3.7.txt b/packages/google-shopping-merchant-inventories/testing/constraints-3.7.txt index 83bec2765bf7..130a0c0f80ab 100644 --- a/packages/google-shopping-merchant-inventories/testing/constraints-3.7.txt +++ b/packages/google-shopping-merchant-inventories/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -google-shopping-type==0.1.0 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py 
b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py index e981e1f5e15a..d16aebb40700 100644 --- a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py +++ b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_local_inventory_service.py @@ -1369,22 +1369,23 @@ async def test_list_local_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_local_inventories - ] = mock_object + ] = mock_rpc request = {} await client.list_local_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_local_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1980,22 +1981,23 @@ async def test_insert_local_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_local_inventory - ] = mock_object + ] = mock_rpc request = {} await client.insert_local_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_local_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2289,22 +2291,23 @@ async def test_delete_local_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_local_inventory - ] = mock_object + ] = mock_rpc request = {} await client.delete_local_inventory(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_local_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py index 1ee368cce528..054d04148e18 100644 --- a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py +++ b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1beta/test_regional_inventory_service.py @@ -1377,22 +1377,23 @@ async def test_list_regional_inventories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_regional_inventories - ] = mock_object + ] = mock_rpc request = {} await client.list_regional_inventories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_regional_inventories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1980,22 +1981,23 @@ async def test_insert_regional_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_regional_inventory - ] = mock_object + ] = mock_rpc request = {} await client.insert_regional_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_regional_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2281,22 +2283,23 @@ async def test_delete_regional_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_regional_inventory - ] = mock_object + ] = mock_rpc request = {} await client.delete_regional_inventory(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_regional_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp/gapic_version.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp/gapic_version.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/gapic_version.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/async_client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/async_client.py index 74b69884dff5..58e49da98983 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/async_client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,10 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LfpInventoryServiceClient).get_transport_class, - type(LfpInventoryServiceClient), - ) + get_transport_class = LfpInventoryServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/client.py index 636b2cef4268..0ffe78db5e8e 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_inventory_service/client.py @@ -672,7 +672,7 @@ def __init__( Type[LfpInventoryServiceTransport], Callable[..., LfpInventoryServiceTransport], ] = ( - type(self).get_transport_class(transport) + LfpInventoryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LfpInventoryServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/async_client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/async_client.py index 1e5a55c7e9ca..57a52a52191d 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/async_client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LfpSaleServiceClient).get_transport_class, type(LfpSaleServiceClient) - ) + get_transport_class = LfpSaleServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/client.py index cc69457f94e4..a17cd45505c6 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_sale_service/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[LfpSaleServiceTransport], Callable[..., LfpSaleServiceTransport] ] = ( - type(self).get_transport_class(transport) + LfpSaleServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LfpSaleServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py index b912aa7ca2f5..5dce7e0b8308 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LfpStoreServiceClient).get_transport_class, type(LfpStoreServiceClient) - ) + get_transport_class = LfpStoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py index c04fac71a69f..33b7c007b1e8 100644 --- a/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py +++ b/packages/google-shopping-merchant-lfp/google/shopping/merchant_lfp_v1beta/services/lfp_store_service/client.py @@ -665,7 +665,7 @@ def __init__( transport_init: Union[ Type[LfpStoreServiceTransport], Callable[..., LfpStoreServiceTransport] ] = ( - type(self).get_transport_class(transport) + LfpStoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LfpStoreServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-lfp/samples/generated_samples/snippet_metadata_google.shopping.merchant.lfp.v1beta.json b/packages/google-shopping-merchant-lfp/samples/generated_samples/snippet_metadata_google.shopping.merchant.lfp.v1beta.json index ebd32abd766c..e3327910013f 100644 --- a/packages/google-shopping-merchant-lfp/samples/generated_samples/snippet_metadata_google.shopping.merchant.lfp.v1beta.json +++ b/packages/google-shopping-merchant-lfp/samples/generated_samples/snippet_metadata_google.shopping.merchant.lfp.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-lfp", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-lfp/setup.py b/packages/google-shopping-merchant-lfp/setup.py index 71a2ed217612..c1e6ee144039 100644 --- a/packages/google-shopping-merchant-lfp/setup.py +++ b/packages/google-shopping-merchant-lfp/setup.py @@ -47,7 +47,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-shopping-type >= 0.1.0, <1.0.0dev", + "google-shopping-type >= 0.1.6, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp" diff --git a/packages/google-shopping-merchant-lfp/testing/constraints-3.7.txt b/packages/google-shopping-merchant-lfp/testing/constraints-3.7.txt index 83bec2765bf7..130a0c0f80ab 100644 --- a/packages/google-shopping-merchant-lfp/testing/constraints-3.7.txt +++ b/packages/google-shopping-merchant-lfp/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -google-shopping-type==0.1.0 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_inventory_service.py b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_inventory_service.py index bb09216e0296..4bd3d2796892 100644 --- a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_inventory_service.py +++ 
b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_inventory_service.py @@ -1395,22 +1395,23 @@ async def test_insert_lfp_inventory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_lfp_inventory - ] = mock_object + ] = mock_rpc request = {} await client.insert_lfp_inventory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_lfp_inventory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_sale_service.py b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_sale_service.py index 10edf71b36f5..03b007c66aba 100644 --- a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_sale_service.py +++ b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_sale_service.py @@ -1313,22 +1313,23 @@ async def test_insert_lfp_sale_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_lfp_sale - ] = mock_object + ] = mock_rpc request = {} await client.insert_lfp_sale(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_lfp_sale(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py index a047091c8459..871ddd443b2e 100644 --- a/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py +++ b/packages/google-shopping-merchant-lfp/tests/unit/gapic/merchant_lfp_v1beta/test_lfp_store_service.py @@ -1338,22 +1338,23 @@ async def test_get_lfp_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lfp_store - ] = mock_object + ] = mock_rpc request = {} await client.get_lfp_store(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lfp_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1756,22 +1757,23 @@ async def test_insert_lfp_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_lfp_store - ] = mock_object + ] = mock_rpc request = {} await client.insert_lfp_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_lfp_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2144,22 +2146,23 @@ async def test_delete_lfp_store_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lfp_store - ] = mock_object + ] = mock_rpc request = {} await client.delete_lfp_store(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_lfp_store(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2501,22 +2504,23 @@ async def test_list_lfp_stores_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lfp_stores - ] = mock_object + ] = mock_rpc request = {} await client.list_lfp_stores(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lfp_stores(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications/gapic_version.py b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications/gapic_version.py +++ b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/gapic_version.py b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/async_client.py b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/async_client.py index 4af4a017403a..f9dd01b3f3df 100644 --- a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/async_client.py +++ b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NotificationsApiServiceClient).get_transport_class, - type(NotificationsApiServiceClient), - ) + get_transport_class = NotificationsApiServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/client.py b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/client.py index 7420b59f55dd..7d15bc6d0ca4 100644 --- a/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/client.py +++ b/packages/google-shopping-merchant-notifications/google/shopping/merchant_notifications_v1beta/services/notifications_api_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[NotificationsApiServiceTransport], Callable[..., NotificationsApiServiceTransport], ] = ( - type(self).get_transport_class(transport) + NotificationsApiServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NotificationsApiServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-notifications/samples/generated_samples/snippet_metadata_google.shopping.merchant.notifications.v1beta.json b/packages/google-shopping-merchant-notifications/samples/generated_samples/snippet_metadata_google.shopping.merchant.notifications.v1beta.json index 33be6b0ab582..834227f32181 100644 --- a/packages/google-shopping-merchant-notifications/samples/generated_samples/snippet_metadata_google.shopping.merchant.notifications.v1beta.json +++ 
b/packages/google-shopping-merchant-notifications/samples/generated_samples/snippet_metadata_google.shopping.merchant.notifications.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-notifications", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-notifications/setup.py b/packages/google-shopping-merchant-notifications/setup.py index c018d2f6810f..5b968b1d5347 100644 --- a/packages/google-shopping-merchant-notifications/setup.py +++ b/packages/google-shopping-merchant-notifications/setup.py @@ -49,7 +49,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-shopping-type >= 0.1.0, <1.0.0dev", + "google-shopping-type >= 0.1.6, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications" diff --git a/packages/google-shopping-merchant-notifications/testing/constraints-3.7.txt b/packages/google-shopping-merchant-notifications/testing/constraints-3.7.txt index 83bec2765bf7..130a0c0f80ab 100644 --- a/packages/google-shopping-merchant-notifications/testing/constraints-3.7.txt +++ b/packages/google-shopping-merchant-notifications/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -google-shopping-type==0.1.0 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py b/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py index db0c89df016c..7e6fe3b73458 100644 --- a/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py +++ b/packages/google-shopping-merchant-notifications/tests/unit/gapic/merchant_notifications_v1beta/test_notifications_api_service.py @@ -1377,22 +1377,23 @@ async def test_get_notification_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification_subscription - ] = mock_object + ] = mock_rpc request = {} await client.get_notification_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1782,22 +1783,23 @@ async def test_create_notification_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_notification_subscription - ] = mock_object + ] = mock_rpc request = {} await client.create_notification_subscription(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_notification_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2201,22 +2203,23 @@ async def test_update_notification_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_notification_subscription - ] = mock_object + ] = mock_rpc request = {} await client.update_notification_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_notification_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2607,22 +2610,23 @@ async def test_delete_notification_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_notification_subscription - ] = mock_object + ] = mock_rpc request = {} await client.delete_notification_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_notification_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2988,22 +2992,23 @@ async def test_list_notification_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notification_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_notification_subscriptions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notification_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products/gapic_version.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products/gapic_version.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_version.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/async_client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/async_client.py index f64af9e701a5..54aac15fa95d 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/async_client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,10 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductInputsServiceClient).get_transport_class, - type(ProductInputsServiceClient), - ) + get_transport_class = ProductInputsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/client.py index 69f62a38bf37..67f7be20d866 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/product_inputs_service/client.py @@ -685,7 +685,7 @@ def __init__( Type[ProductInputsServiceTransport], Callable[..., ProductInputsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ProductInputsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductInputsServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/async_client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/async_client.py index 9a5b6f0af184..ee17f6207ae3 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/async_client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProductsServiceClient).get_transport_class, type(ProductsServiceClient) - ) + get_transport_class = ProductsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/client.py index b5934b0a7348..cf8189145439 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1beta/services/products_service/client.py @@ -661,7 +661,7 @@ def __init__( transport_init: Union[ Type[ProductsServiceTransport], Callable[..., ProductsServiceTransport] ] = ( - type(self).get_transport_class(transport) + ProductsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProductsServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-products/samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json b/packages/google-shopping-merchant-products/samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json index 61205db8b73f..10f94f8cadc7 100644 --- a/packages/google-shopping-merchant-products/samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json +++ b/packages/google-shopping-merchant-products/samples/generated_samples/snippet_metadata_google.shopping.merchant.products.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-products", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-products/setup.py b/packages/google-shopping-merchant-products/setup.py index 9779bc821a80..7edd0edd53d9 100644 --- a/packages/google-shopping-merchant-products/setup.py +++ b/packages/google-shopping-merchant-products/setup.py @@ -47,7 +47,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-shopping-type >= 0.1.0, <1.0.0dev", + "google-shopping-type >= 0.1.6, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products" diff --git a/packages/google-shopping-merchant-products/testing/constraints-3.7.txt b/packages/google-shopping-merchant-products/testing/constraints-3.7.txt index 83bec2765bf7..130a0c0f80ab 100644 --- a/packages/google-shopping-merchant-products/testing/constraints-3.7.txt +++ b/packages/google-shopping-merchant-products/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -google-shopping-type==0.1.0 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_product_inputs_service.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_product_inputs_service.py index ee28c6fb1d0c..d625be3942fc 100644 --- 
a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_product_inputs_service.py +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_product_inputs_service.py @@ -1386,22 +1386,23 @@ async def test_insert_product_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_product_input - ] = mock_object + ] = mock_rpc request = {} await client.insert_product_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_product_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1694,22 +1695,23 @@ async def test_delete_product_input_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_product_input - ] = mock_object + ] = mock_rpc request = {} await client.delete_product_input(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_product_input(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_products_service.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_products_service.py index b72f8e56e564..b34f09fb80a5 100644 --- a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_products_service.py +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1beta/test_products_service.py @@ -1324,22 +1324,23 @@ async def test_get_product_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_product - ] = mock_object + ] = mock_rpc request = {} await client.get_product(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_product(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1698,22 +1699,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/gapic_version.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/gapic_version.py +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_version.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/async_client.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/async_client.py index c8cc15465363..70670bb806fb 100644 --- a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/async_client.py +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PromotionsServiceClient).get_transport_class, type(PromotionsServiceClient) - ) + get_transport_class = PromotionsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/client.py b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/client.py index d0c6f7ace430..d8d795bd8757 100644 --- a/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/client.py +++ b/packages/google-shopping-merchant-promotions/google/shopping/merchant_promotions_v1beta/services/promotions_service/client.py @@ -667,7 +667,7 @@ def __init__( Type[PromotionsServiceTransport], Callable[..., PromotionsServiceTransport], ] = ( - type(self).get_transport_class(transport) + PromotionsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PromotionsServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-promotions/samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json b/packages/google-shopping-merchant-promotions/samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json index e533633456aa..4b7f108a5403 100644 --- a/packages/google-shopping-merchant-promotions/samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json +++ b/packages/google-shopping-merchant-promotions/samples/generated_samples/snippet_metadata_google.shopping.merchant.promotions.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-promotions", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-promotions/setup.py b/packages/google-shopping-merchant-promotions/setup.py index 3029279654ed..7e8adf132ed5 100644 --- a/packages/google-shopping-merchant-promotions/setup.py +++ b/packages/google-shopping-merchant-promotions/setup.py @@ -47,7 +47,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-shopping-type >= 0.1.0, <1.0.0dev", + "google-shopping-type >= 0.1.6, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions" diff --git a/packages/google-shopping-merchant-promotions/testing/constraints-3.7.txt b/packages/google-shopping-merchant-promotions/testing/constraints-3.7.txt index 83bec2765bf7..130a0c0f80ab 100644 --- a/packages/google-shopping-merchant-promotions/testing/constraints-3.7.txt +++ b/packages/google-shopping-merchant-promotions/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -google-shopping-type==0.1.0 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/test_promotions_service.py b/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/test_promotions_service.py index 
5c882d3aa643..acfb2ea13911 100644 --- a/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/test_promotions_service.py +++ b/packages/google-shopping-merchant-promotions/tests/unit/gapic/merchant_promotions_v1beta/test_promotions_service.py @@ -1347,22 +1347,23 @@ async def test_insert_promotion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.insert_promotion - ] = mock_object + ] = mock_rpc request = {} await client.insert_promotion(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.insert_promotion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1659,22 +1660,23 @@ async def test_get_promotion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_promotion - ] = mock_object + ] = mock_rpc request = {} await client.get_promotion(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_promotion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2037,22 +2039,23 @@ async def test_list_promotions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_promotions - ] = mock_object + ] = mock_rpc request = {} await client.list_promotions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_promotions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota/gapic_version.py b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota/gapic_version.py +++ b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/gapic_version.py b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/async_client.py b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/async_client.py index 29e055451e31..3e89516cd556 100644 --- a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/async_client.py +++ b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(QuotaServiceClient).get_transport_class, type(QuotaServiceClient) - ) + get_transport_class = QuotaServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/client.py b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/client.py index a2932afb9a1b..e52fd74db22f 100644 --- a/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/client.py +++ b/packages/google-shopping-merchant-quota/google/shopping/merchant_quota_v1beta/services/quota_service/client.py @@ -652,7 +652,7 @@ def __init__( transport_init: Union[ Type[QuotaServiceTransport], Callable[..., QuotaServiceTransport] ] = ( - type(self).get_transport_class(transport) + QuotaServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., QuotaServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-quota/samples/generated_samples/snippet_metadata_google.shopping.merchant.quota.v1beta.json b/packages/google-shopping-merchant-quota/samples/generated_samples/snippet_metadata_google.shopping.merchant.quota.v1beta.json index 62eff3bd3900..884c8d108671 100644 --- a/packages/google-shopping-merchant-quota/samples/generated_samples/snippet_metadata_google.shopping.merchant.quota.v1beta.json +++ b/packages/google-shopping-merchant-quota/samples/generated_samples/snippet_metadata_google.shopping.merchant.quota.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-quota", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py b/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py index 
1c8418fc7994..8ec35cf70b33 100644 --- a/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py +++ b/packages/google-shopping-merchant-quota/tests/unit/gapic/merchant_quota_v1beta/test_quota_service.py @@ -1270,22 +1270,23 @@ async def test_list_quota_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_quota_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_quota_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_quota_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/gapic_version.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/gapic_version.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/gapic_version.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/gapic_version.py index f8ea948a9c30..558c8aab67c5 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/gapic_version.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/async_client.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/async_client.py index 3e542f22dfdd..f43a77eca125 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/async_client.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ReportServiceClient).get_transport_class, type(ReportServiceClient) - ) + get_transport_class = ReportServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/client.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/client.py index 7f6a5473fb1c..e82574112938 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/client.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/services/report_service/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[ReportServiceTransport], Callable[..., ReportServiceTransport] ] = ( - type(self).get_transport_class(transport) + ReportServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReportServiceTransport], transport) ) diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py index 5b32972350bb..ab35339ba41f 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1beta/types/reports.py @@ -703,13 +703,11 @@ class AggregatedReportingContextStatus(proto.Enum): Here's an example of how the aggregated status is computed: - ``` Free listings \| Shopping ads \| Status --------------|--------------|------------------------------ Approved \| Approved \| ELIGIBLE Approved \| Pending \| ELIGIBLE Approved \| Disapproved \| ELIGIBLE_LIMITED Pending \| Pending \| PENDING Disapproved \| Disapproved \| NOT_ELIGIBLE_OR_DISAPPROVED - ``` Values: AGGREGATED_REPORTING_CONTEXT_STATUS_UNSPECIFIED (0): diff --git a/packages/google-shopping-merchant-reports/samples/generated_samples/snippet_metadata_google.shopping.merchant.reports.v1beta.json b/packages/google-shopping-merchant-reports/samples/generated_samples/snippet_metadata_google.shopping.merchant.reports.v1beta.json index 23c07a989bdf..55992b8a05a3 100644 --- a/packages/google-shopping-merchant-reports/samples/generated_samples/snippet_metadata_google.shopping.merchant.reports.v1beta.json +++ b/packages/google-shopping-merchant-reports/samples/generated_samples/snippet_metadata_google.shopping.merchant.reports.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-merchant-reports", - "version": "0.1.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-reports/setup.py b/packages/google-shopping-merchant-reports/setup.py index 63375561646a..97aa3194c122 100644 --- a/packages/google-shopping-merchant-reports/setup.py +++ b/packages/google-shopping-merchant-reports/setup.py @@ -47,7 +47,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-shopping-type >= 0.1.0, <1.0.0dev", + "google-shopping-type >= 0.1.6, <1.0.0dev", ] url = 
"https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports" diff --git a/packages/google-shopping-merchant-reports/testing/constraints-3.7.txt b/packages/google-shopping-merchant-reports/testing/constraints-3.7.txt index 83bec2765bf7..130a0c0f80ab 100644 --- a/packages/google-shopping-merchant-reports/testing/constraints-3.7.txt +++ b/packages/google-shopping-merchant-reports/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -google-shopping-type==0.1.0 +google-shopping-type==0.1.6 diff --git a/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py b/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py index 1107a78fc157..fdc692e3c64d 100644 --- a/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py +++ b/packages/google-shopping-merchant-reports/tests/unit/gapic/merchant_reports_v1beta/test_report_service.py @@ -1279,22 +1279,23 @@ async def test_search_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search - ] = mock_object + ] = mock_rpc request = {} await client.search(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-shopping-type/google/shopping/type/gapic_version.py b/packages/google-shopping-type/google/shopping/type/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-shopping-type/google/shopping/type/gapic_version.py +++ b/packages/google-shopping-type/google/shopping/type/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/grafeas/grafeas/grafeas/gapic_version.py b/packages/grafeas/grafeas/grafeas/gapic_version.py index 50d842f376d0..558c8aab67c5 100644 --- a/packages/grafeas/grafeas/grafeas/gapic_version.py +++ b/packages/grafeas/grafeas/grafeas/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/grafeas/grafeas/grafeas_v1/gapic_version.py b/packages/grafeas/grafeas/grafeas_v1/gapic_version.py index 50d842f376d0..558c8aab67c5 100644 --- a/packages/grafeas/grafeas/grafeas_v1/gapic_version.py +++ b/packages/grafeas/grafeas/grafeas_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py index b718e2058811..f18d20debe1d 100644 --- a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -137,9 +136,7 @@ def transport(self) -> GrafeasTransport: """ return self._client.transport - get_transport_class = functools.partial( - type(GrafeasClient).get_transport_class, type(GrafeasClient) - ) + get_transport_class = GrafeasClient.get_transport_class def __init__( self, diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py index b34a328c31eb..1e3d2e5b01fc 100644 --- a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py @@ -339,7 +339,7 @@ def __init__( if isinstance(transport, GrafeasTransport): self._transport = transport else: - Transport = type(self).get_transport_class(transport) + Transport = GrafeasClient.get_transport_class(transport) self._transport = Transport(credentials=credentials) def get_occurrence( diff --git a/packages/grafeas/samples/generated_samples/snippet_metadata_grafeas.v1.json b/packages/grafeas/samples/generated_samples/snippet_metadata_grafeas.v1.json index 0a954f7d13ae..dc3e1cc8b7ee 100644 --- a/packages/grafeas/samples/generated_samples/snippet_metadata_grafeas.v1.json +++ b/packages/grafeas/samples/generated_samples/snippet_metadata_grafeas.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "grafeas", - "version": "1.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py index 45679a0a8311..7974b1d1afdc 100644 --- a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py +++ b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py @@ -259,22 +259,23 @@ async def test_get_occurrence_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_occurrence - ] = mock_object + ] = mock_rpc request = {} await client.get_occurrence(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_occurrence(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -633,22 +634,23 @@ async def test_list_occurrences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_occurrences - ] = mock_object + ] = mock_rpc request = {} await client.list_occurrences(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_occurrences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1204,22 +1206,23 @@ async def test_delete_occurrence_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_occurrence - ] = mock_object + ] = mock_rpc request = {} await client.delete_occurrence(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_occurrence(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1591,22 +1594,23 @@ async def test_create_occurrence_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_occurrence - ] = mock_object + ] = mock_rpc request = {} await client.create_occurrence(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_occurrence(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1987,22 +1991,23 @@ async def test_batch_create_occurrences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_occurrences - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_occurrences(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_occurrences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2390,22 +2395,23 @@ async def test_update_occurrence_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_occurrence - ] = mock_object + ] = mock_rpc request = {} await client.update_occurrence(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_occurrence(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2812,22 +2818,23 @@ async def test_get_occurrence_note_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_occurrence_note - ] = mock_object + ] = mock_rpc request = {} await client.get_occurrence_note(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_occurrence_note(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3200,22 +3207,23 @@ async def test_get_note_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_note - ] = mock_object + ] = mock_rpc request = {} await client.get_note(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_note(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3570,22 +3578,23 @@ async def test_list_notes_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notes - ] = mock_object + ] = mock_rpc request = {} await client.list_notes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4131,22 +4140,23 @@ async def test_delete_note_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_note - ] = mock_object + ] = mock_rpc request = {} await client.delete_note(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_note(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4500,22 +4510,23 @@ async def test_create_note_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_note - ] = mock_object + ] = mock_rpc request = {} await client.create_note(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_note(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4895,22 +4906,23 @@ async def test_batch_create_notes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_notes - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_notes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_notes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5288,22 +5300,23 @@ async def test_update_note_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_note - ] = mock_object + ] = mock_rpc request = {} await client.update_note(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_note(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5693,22 +5706,23 @@ async def test_list_note_occurrences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_note_occurrences - ] = mock_object + ] = mock_rpc request = {} await client.list_note_occurrences(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_note_occurrences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/scripts/client-post-processing/unique-grafeas-client.yaml b/scripts/client-post-processing/unique-grafeas-client.yaml index fd68e016a3a0..ef2efd5dab6f 100644 --- a/scripts/client-post-processing/unique-grafeas-client.yaml +++ b/scripts/client-post-processing/unique-grafeas-client.yaml @@ -1767,7 +1767,7 @@ replacements: \ transport_init: Union\[ \ Type\[GrafeasTransport\], Callable\[..., GrafeasTransport\] \ \] = \( - \ type\(self\).get_transport_class\(transport\) + \ GrafeasClient.get_transport_class\(transport\) \ if isinstance\(transport, str\) or transport is None \ else cast\(Callable\[..., GrafeasTransport\], transport\) \ \) @@ -1810,14 +1810,14 @@ replacements: if isinstance(transport, GrafeasTransport): self._transport = transport else: - Transport = type(self).get_transport_class(transport) + Transport = GrafeasClient.get_transport_class(transport) self._transport = Transport(credentials=credentials) count: 1 - paths: [ packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py ] before: | - \)\n + GrafeasClient.get_transport_class\n \ def __init__\( \ self, \ \*, @@ -1884,7 +1884,7 @@ replacements: \ client_info=client_info, \ \) after: | - )\n + GrafeasClient.get_transport_class\n def __init__( self, *, From 200a64b0c9ce565103295ab0a8126353f386fbce Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 09:46:01 -0400 Subject: [PATCH 023/108] docs: [google-cloud-texttospeech] update Long Audio capabilities to include SSML (#13020) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 664823949 Source-Link: https://github.com/googleapis/googleapis/commit/d9eae9f029427bd9ed4379d8e3cd46ca69f1a33f Source-Link: https://github.com/googleapis/googleapis-gen/commit/68d27baaaca51c63ad17dd5b851679b649899634 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRleHR0b3NwZWVjaC8uT3dsQm90LnlhbWwiLCJoIjoiNjhkMjdiYWFhY2E1MWM2M2FkMTdkZDViODUxNjc5YjY0OTg5OTYzNCJ9 --------- Co-authored-by: Owl Bot --- .../google/cloud/texttospeech/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1/types/cloud_tts_lrs.py | 3 +-- .../google/cloud/texttospeech_v1beta1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.texttospeech.v1.json | 2 +- .../snippet_metadata_google.cloud.texttospeech.v1beta1.json | 2 +- 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 6053ad2404bf..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 6053ad2404bf..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py index 5a5cad82b733..fece3ac17d02 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py @@ -42,8 +42,7 @@ class SynthesizeLongAudioRequest(proto.Message): ``projects/*/locations/*``. input (google.cloud.texttospeech_v1.types.SynthesisInput): Required. The Synthesizer requires either - plain text or SSML as input. While Long Audio is - in preview, SSML is temporarily unsupported. + plain text or SSML as input. audio_config (google.cloud.texttospeech_v1.types.AudioConfig): Required. The configuration of the synthesized audio. diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 6053ad2404bf..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index 3dced488d328..f87785fcdd45 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.17.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index 42e6c3c22568..2877853b66c5 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.17.0" + "version": "0.1.0" }, "snippets": [ { From 3014fab25f9161bd25cbe009e106347fb35d06a5 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Tue, 20 Aug 2024 06:46:09 -0700 Subject: [PATCH 024/108] chore: Update the root changelog (#13022) Update the root changelog --- CHANGELOG.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b02f7d31c89..2696012f56a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- - [google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-ai-generativelanguage==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) - [google-analytics-data==0.18.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) @@ -30,7 +30,7 @@ Changelogs - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.24](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- 
[google-cloud-batch==0.17.25](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -78,7 +78,7 @@ Changelogs - [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) - [google-cloud-dialogflow==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) - [google-cloud-discoveryengine==0.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-dlp==3.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - [google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) - [google-cloud-documentai==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) @@ -105,7 +105,7 @@ Changelogs - [google-cloud-language==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) - [google-cloud-life-sciences==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) - [google-cloud-managed-identities==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) -- [google-cloud-managedkafka==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) +- [google-cloud-managedkafka==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) - [google-cloud-media-translation==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) - [google-cloud-memcache==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) - [google-cloud-migrationcenter==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) @@ -155,13 +155,13 @@ Changelogs - [google-cloud-source-context==1.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) - [google-cloud-speech==2.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) - [google-cloud-storage-control==1.0.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) -- 
[google-cloud-storage-transfer==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) +- [google-cloud-storage-transfer==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) - [google-cloud-storageinsights==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) - [google-cloud-support==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) - [google-cloud-talent==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) - [google-cloud-tasks==2.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) - [google-cloud-telcoautomation==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) -- [google-cloud-texttospeech==2.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) +- [google-cloud-texttospeech==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) - [google-cloud-tpu==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) - [google-cloud-trace==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) - [google-cloud-translate==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) From e3a6b17c8b05ef23da801e81598ce2d75e18b6bb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 09:46:17 -0400 Subject: [PATCH 025/108] docs: [google-cloud-batch] clarify tasks success criteria for background runnable (#13023) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 665192663 Source-Link: https://github.com/googleapis/googleapis/commit/07ede94493ac8cb9a38f2d248bedc557bca95dd8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d88a093b400e1ba1b7c2d666cef2de94152bb212 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiJkODhhMDkzYjQwMGUxYmExYjdjMmQ2NjZjZWYyZGU5NDE1MmJiMjEyIn0= BEGIN_NESTED_COMMIT docs: [google-cloud-batch] clarify tasks success criteria for background runnable PiperOrigin-RevId: 665192495 Source-Link: https://github.com/googleapis/googleapis/commit/d7f3478b9cf0b14e3658dfcb48e7f1e61a17d625 Source-Link: https://github.com/googleapis/googleapis-gen/commit/66d372d7c788bc91b22166eda632e210acf9866d Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiI2NmQzNzJkN2M3ODhiYzkxYjIyMTY2ZWRhNjMyZTIxMGFjZjk4NjZkIn0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot --- .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google/cloud/batch_v1/types/task.py | 46 +++++++++++-------- .../cloud/batch_v1alpha/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/types/task.py | 46 +++++++++++-------- ...nippet_metadata_google.cloud.batch.v1.json | 2 +- ...t_metadata_google.cloud.batch.v1alpha.json | 2 +- 7 files changed, 59 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 43863a7a8ca3..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.25" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 43863a7a8ca3..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.25" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py index 17aa644fd47a..d42dfd8b608b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py @@ -265,16 +265,26 @@ class Runnable(proto.Message): to understand the logs. If not provided the index of the runnable will be used for outputs. ignore_exit_status (bool): - Normally, a non-zero exit status causes the - Task to fail. This flag allows execution of - other Runnables to continue instead. + Normally, a runnable that returns a non-zero exit status + fails and causes the task to fail. However, you can set this + field to ``true`` to allow the task to continue executing + its other runnables even if this runnable fails. background (bool): - This flag allows a Runnable to continue - running in the background while the Task - executes subsequent Runnables. 
This is useful to - provide services to other Runnables (or to - provide debugging support tools like SSH - servers). + Normally, a runnable that doesn't exit causes its task to + fail. However, you can set this field to ``true`` to + configure a background runnable. Background runnables are + allowed continue running in the background while the task + executes subsequent runnables. For example, background + runnables are useful for providing services to other + runnables or providing debugging-support tools like SSH + servers. + + Specifically, background runnables are killed automatically + (if they have not already exited) a short time after all + foreground runnables have completed. Even though this is + likely to result in a non-zero exit status for the + background runnable, these automatic kills are not treated + as task failures. always_run (bool): By default, after a Runnable fails, no further Runnable are executed. This flag indicates that this Runnable must be run @@ -567,16 +577,14 @@ class TaskSpec(proto.Message): scripts, executable containers, and/or barriers) for each task in this task group to run. Each task runs this list of runnables in order. For a task to succeed, all of its script - and container runnables each must either exit with a zero - status or enable the ``ignore_exit_status`` subfield and - exit with any status. - - Background runnables are killed automatically (if they have - not already exited) a short time after all foreground - runnables have completed. Even though this is likely to - result in a non-zero exit status for the background - runnable, these automatic kills are not treated as Task - failures. + and container runnables each must meet at least one of the + following conditions: + + - The runnable exited with a zero status. + - The runnable didn't finish, but you enabled its + ``background`` subfield. + - The runnable exited with a non-zero status, but you + enabled its ``ignore_exit_status`` subfield. compute_resource (google.cloud.batch_v1.types.ComputeResource): ComputeResource requirements. max_run_duration (google.protobuf.duration_pb2.Duration): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 43863a7a8ca3..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.25" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py index 50619e3864b1..b6ddf5702b52 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py @@ -305,16 +305,26 @@ class Runnable(proto.Message): to understand the logs. If not provided the index of the runnable will be used for outputs. ignore_exit_status (bool): - Normally, a non-zero exit status causes the - Task to fail. This flag allows execution of - other Runnables to continue instead. + Normally, a runnable that returns a non-zero exit status + fails and causes the task to fail. 
However, you can set this + field to ``true`` to allow the task to continue executing + its other runnables even if this runnable fails. background (bool): - This flag allows a Runnable to continue - running in the background while the Task - executes subsequent Runnables. This is useful to - provide services to other Runnables (or to - provide debugging support tools like SSH - servers). + Normally, a runnable that doesn't exit causes its task to + fail. However, you can set this field to ``true`` to + configure a background runnable. Background runnables are + allowed continue running in the background while the task + executes subsequent runnables. For example, background + runnables are useful for providing services to other + runnables or providing debugging-support tools like SSH + servers. + + Specifically, background runnables are killed automatically + (if they have not already exited) a short time after all + foreground runnables have completed. Even though this is + likely to result in a non-zero exit status for the + background runnable, these automatic kills are not treated + as task failures. always_run (bool): By default, after a Runnable fails, no further Runnable are executed. This flag indicates that this Runnable must be run @@ -607,16 +617,14 @@ class TaskSpec(proto.Message): scripts, executable containers, and/or barriers) for each task in this task group to run. Each task runs this list of runnables in order. For a task to succeed, all of its script - and container runnables each must either exit with a zero - status or enable the ``ignore_exit_status`` subfield and - exit with any status. - - Background runnables are killed automatically (if they have - not already exited) a short time after all foreground - runnables have completed. Even though this is likely to - result in a non-zero exit status for the background - runnable, these automatic kills are not treated as Task - failures. + and container runnables each must meet at least one of the + following conditions: + + - The runnable exited with a zero status. + - The runnable didn't finish, but you enabled its + ``background`` subfield. + - The runnable exited with a non-zero status, but you + enabled its ``ignore_exit_status`` subfield. compute_resource (google.cloud.batch_v1alpha.types.ComputeResource): ComputeResource requirements. 
max_run_duration (google.protobuf.duration_pb2.Duration): diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 772c4bb246f5..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.25" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 56a4ce0bde8d..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.25" + "version": "0.1.0" }, "snippets": [ { From 78bd284bbaa89ef26ce60c20beb6445212c8b27b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 09:46:47 -0400 Subject: [PATCH 026/108] docs: [google-cloud-bigquery-datatransfer] deprecate `authorization_code` (#13021) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 664993498 Source-Link: https://github.com/googleapis/googleapis/commit/b678386866ca6ee40c84c9043beb70992309343e Source-Link: https://github.com/googleapis/googleapis-gen/commit/148188cb9e24793ca3f52eac441a74b8f543bb7a Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGF0cmFuc2Zlci8uT3dsQm90LnlhbWwiLCJoIjoiMTQ4MTg4Y2I5ZTI0NzkzY2EzZjUyZWFjNDQxYTc0YjhmNTQzYmI3YSJ9 --------- Co-authored-by: Owl Bot --- .../types/datatransfer.py | 30 ++++++++++++++----- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 1138fb6ff3f5..30e607bd0c09 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -492,6 +492,11 @@ class CreateTransferConfigRequest(proto.Message): Required. Data transfer configuration to create. authorization_code (str): + Deprecated: Authorization code was required when + ``transferConfig.dataSourceId`` is 'youtube_channel' but it + is no longer used in any data sources. Use ``version_info`` + instead. + Optional OAuth2 authorization code to use with this transfer configuration. This is required only if ``transferConfig.dataSourceId`` is 'youtube_channel' and new @@ -505,7 +510,7 @@ class CreateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. 
- data_source_scopes are the scopes returned by ListDataSources method. @@ -514,8 +519,10 @@ class CreateTransferConfigRequest(proto.Message): ``service_account_name`` is used to create the transfer config. version_info (str): - Optional version info. This is required only if - ``transferConfig.dataSourceId`` is not 'youtube_channel' and + Optional version info. This parameter replaces + ``authorization_code`` which is no longer used in any data + sources. This is required only if + ``transferConfig.dataSourceId`` is 'youtube_channel' *or* new credentials are needed, as indicated by ``CheckValidCreds``. In order to obtain version info, make a request to the following URL: @@ -526,7 +533,7 @@ class CreateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. - data_source_scopes are the scopes returned by ListDataSources method. @@ -579,6 +586,11 @@ class UpdateTransferConfigRequest(proto.Message): Required. Data transfer configuration to create. authorization_code (str): + Deprecated: Authorization code was required when + ``transferConfig.dataSourceId`` is 'youtube_channel' but it + is no longer used in any data sources. Use ``version_info`` + instead. + Optional OAuth2 authorization code to use with this transfer configuration. This is required only if ``transferConfig.dataSourceId`` is 'youtube_channel' and new @@ -592,7 +604,7 @@ class UpdateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. - data_source_scopes are the scopes returned by ListDataSources method. @@ -604,8 +616,10 @@ class UpdateTransferConfigRequest(proto.Message): Required. Required list of fields to be updated in this request. version_info (str): - Optional version info. This is required only if - ``transferConfig.dataSourceId`` is not 'youtube_channel' and + Optional version info. This parameter replaces + ``authorization_code`` which is no longer used in any data + sources. This is required only if + ``transferConfig.dataSourceId`` is 'youtube_channel' *or* new credentials are needed, as indicated by ``CheckValidCreds``. In order to obtain version info, make a request to the following URL: @@ -616,7 +630,7 @@ class UpdateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. - data_source_scopes are the scopes returned by ListDataSources method. From f69f15371d00578bf8727599fe0604bfa2d42392 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 13:52:16 +0000 Subject: [PATCH 027/108] chore: release main (#13024) :robot: I have created a release *beep* *boop* ---
google-cloud-batch: 0.17.26 ## [0.17.26](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.25...google-cloud-batch-v0.17.26) (2024-08-20) ### Documentation * [google-cloud-batch] clarify tasks success criteria for background ([e3a6b17](https://github.com/googleapis/google-cloud-python/commit/e3a6b17c8b05ef23da801e81598ce2d75e18b6bb)) * [google-cloud-batch] clarify tasks success criteria for background runnable ([#13023](https://github.com/googleapis/google-cloud-python/issues/13023)) ([e3a6b17](https://github.com/googleapis/google-cloud-python/commit/e3a6b17c8b05ef23da801e81598ce2d75e18b6bb))
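For the google-cloud-batch change noted above, here is a minimal sketch (not part of this patch) of a task spec that exercises the clarified semantics: one background runnable plus one runnable whose non-zero exit status is ignored. The container image, commands, and script text are illustrative placeholders only.

    from google.cloud import batch_v1

    task = batch_v1.TaskSpec(
        runnables=[
            # Background runnable: the task does not wait for it to exit, and it is
            # killed automatically once the foreground runnables have completed.
            batch_v1.Runnable(
                container=batch_v1.Runnable.Container(
                    image_uri="busybox",
                    entrypoint="/bin/sh",
                    commands=["-c", "while true; do sleep 60; done"],
                ),
                background=True,
            ),
            # Runnable that is allowed to fail without failing the task.
            batch_v1.Runnable(
                script=batch_v1.Runnable.Script(text="exit 1"),
                ignore_exit_status=True,
            ),
            # Normal foreground runnable; its zero exit status lets the task succeed.
            batch_v1.Runnable(
                script=batch_v1.Runnable.Script(text="echo done"),
            ),
        ],
    )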
google-cloud-bigquery-datatransfer: 3.15.6 ## [3.15.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.5...google-cloud-bigquery-datatransfer-v3.15.6) (2024-08-20) ### Documentation * [google-cloud-bigquery-datatransfer] deprecate `authorization_code` ([#13021](https://github.com/googleapis/google-cloud-python/issues/13021)) ([78bd284](https://github.com/googleapis/google-cloud-python/commit/78bd284bbaa89ef26ce60c20beb6445212c8b27b))
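For the google-cloud-bigquery-datatransfer change noted above, a short sketch (not part of this patch) of creating a transfer config with ``version_info`` rather than the deprecated ``authorization_code``. The project, location, dataset, and version_info values are placeholders.

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    transfer_config = bigquery_datatransfer_v1.TransferConfig(
        display_name="YouTube channel transfer",
        data_source_id="youtube_channel",
        destination_dataset_id="my_dataset",
    )

    request = bigquery_datatransfer_v1.CreateTransferConfigRequest(
        parent="projects/my-project/locations/us",
        transfer_config=transfer_config,
        # version_info replaces authorization_code, which is deprecated above.
        version_info="version-info-obtained-from-the-oauth-flow",
    )
    transfer_config = client.create_transfer_config(request=request)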
google-cloud-texttospeech: 2.17.1 ## [2.17.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.17.0...google-cloud-texttospeech-v2.17.1) (2024-08-20) ### Documentation * [google-cloud-texttospeech] update Long Audio capabilities to include SSML ([#13020](https://github.com/googleapis/google-cloud-python/issues/13020)) ([200a64b](https://github.com/googleapis/google-cloud-python/commit/200a64b0c9ce565103295ab0a8126353f386fbce))
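For the google-cloud-texttospeech change noted above, a sketch (not part of this patch) of a Long Audio request that passes SSML input, which the updated docs no longer mark as unsupported. The project, location, voice name, SSML text, and output bucket are placeholders.

    from google.cloud import texttospeech_v1

    client = texttospeech_v1.TextToSpeechLongAudioSynthesizeClient()

    request = texttospeech_v1.SynthesizeLongAudioRequest(
        parent="projects/my-project/locations/us-central1",
        # SSML input is now accepted for long audio synthesis.
        input=texttospeech_v1.SynthesisInput(
            ssml="<speak>Hello <break time='500ms'/> world.</speak>"
        ),
        voice=texttospeech_v1.VoiceSelectionParams(
            language_code="en-US", name="en-US-Standard-A"
        ),
        audio_config=texttospeech_v1.AudioConfig(
            audio_encoding=texttospeech_v1.AudioEncoding.LINEAR16
        ),
        output_gcs_uri="gs://my-bucket/long-audio-output.wav",
    )

    # synthesize_long_audio returns a long-running operation.
    operation = client.synthesize_long_audio(request=request)
    response = operation.result(timeout=300)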
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 6 +++--- packages/google-cloud-batch/CHANGELOG.md | 8 ++++++++ .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.batch.v1.json | 2 +- .../snippet_metadata_google.cloud.batch.v1alpha.json | 2 +- packages/google-cloud-bigquery-datatransfer/CHANGELOG.md | 7 +++++++ .../google/cloud/bigquery_datatransfer/gapic_version.py | 2 +- .../cloud/bigquery_datatransfer_v1/gapic_version.py | 2 +- ...et_metadata_google.cloud.bigquery.datatransfer.v1.json | 2 +- packages/google-cloud-texttospeech/CHANGELOG.md | 7 +++++++ .../google/cloud/texttospeech/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1beta1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.texttospeech.v1.json | 2 +- ...nippet_metadata_google.cloud.texttospeech.v1beta1.json | 2 +- 17 files changed, 38 insertions(+), 16 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index c0f47cf58de9..18795917ffb4 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -27,7 +27,7 @@ "packages/google-cloud-automl": "2.13.5", "packages/google-cloud-backupdr": "0.1.3", "packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.25", + "packages/google-cloud-batch": "0.17.26", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", "packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -38,7 +38,7 @@ "packages/google-cloud-bigquery-connection": "1.15.5", "packages/google-cloud-bigquery-data-exchange": "0.5.13", "packages/google-cloud-bigquery-datapolicies": "0.6.8", - "packages/google-cloud-bigquery-datatransfer": "3.15.5", + "packages/google-cloud-bigquery-datatransfer": "3.15.6", "packages/google-cloud-bigquery-logging": "1.4.5", "packages/google-cloud-bigquery-migration": "0.11.9", "packages/google-cloud-bigquery-reservation": "1.13.5", @@ -158,7 +158,7 @@ "packages/google-cloud-talent": "2.13.5", "packages/google-cloud-tasks": "2.16.5", "packages/google-cloud-telcoautomation": "0.2.5", - "packages/google-cloud-texttospeech": "2.17.0", + "packages/google-cloud-texttospeech": "2.17.1", "packages/google-cloud-tpu": "1.18.5", "packages/google-cloud-trace": "1.13.5", "packages/google-cloud-translate": "3.16.0", diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index fe8a3a929a87..ef34a533cfa8 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.17.26](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.25...google-cloud-batch-v0.17.26) (2024-08-20) + + +### Documentation + +* [google-cloud-batch] clarify tasks success criteria for background ([e3a6b17](https://github.com/googleapis/google-cloud-python/commit/e3a6b17c8b05ef23da801e81598ce2d75e18b6bb)) +* [google-cloud-batch] clarify tasks success criteria for background runnable ([#13023](https://github.com/googleapis/google-cloud-python/issues/13023)) 
([e3a6b17](https://github.com/googleapis/google-cloud-python/commit/e3a6b17c8b05ef23da801e81598ce2d75e18b6bb)) + ## [0.17.25](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.24...google-cloud-batch-v0.17.25) (2024-08-19) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 558c8aab67c5..e4b992ee0cd9 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.26" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 558c8aab67c5..e4b992ee0cd9 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.26" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 558c8aab67c5..e4b992ee0cd9 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.26" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index e2df1067e4dd..32f12c161ccb 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.26" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 7f67670b100c..889a0b1efe83 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.26" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index 0139e057a0fa..649945d0a61f 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.15.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.5...google-cloud-bigquery-datatransfer-v3.15.6) (2024-08-20) + + +### Documentation + +* [google-cloud-bigquery-datatransfer] deprecate `authorization_code` ([#13021](https://github.com/googleapis/google-cloud-python/issues/13021)) ([78bd284](https://github.com/googleapis/google-cloud-python/commit/78bd284bbaa89ef26ce60c20beb6445212c8b27b)) + ## [3.15.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.4...google-cloud-bigquery-datatransfer-v3.15.5) (2024-07-30) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 558c8aab67c5..769a9d92f8cf 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.15.6" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 558c8aab67c5..769a9d92f8cf 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.15.6" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index adc8c281da8f..1fbf5757f11d 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "0.1.0" + "version": "3.15.6" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/CHANGELOG.md b/packages/google-cloud-texttospeech/CHANGELOG.md index c522f2fe2cec..14211a0b99aa 100644 --- a/packages/google-cloud-texttospeech/CHANGELOG.md +++ b/packages/google-cloud-texttospeech/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-texttospeech/#history +## [2.17.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.17.0...google-cloud-texttospeech-v2.17.1) (2024-08-20) + + +### Documentation + +* [google-cloud-texttospeech] update Long Audio capabilities to include SSML ([#13020](https://github.com/googleapis/google-cloud-python/issues/13020)) ([200a64b](https://github.com/googleapis/google-cloud-python/commit/200a64b0c9ce565103295ab0a8126353f386fbce)) + ## [2.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.16.5...google-cloud-texttospeech-v2.17.0) (2024-08-19) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 558c8aab67c5..6fecc94eb049 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 558c8aab67c5..6fecc94eb049 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 558c8aab67c5..6fecc94eb049 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index f87785fcdd45..ac836bbdf339 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "0.1.0" + "version": "2.17.1" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index 2877853b66c5..c0a9e929493b 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "0.1.0" + "version": "2.17.1" }, "snippets": [ { From 70e2dd5f024dd5c94a5e02b442bbab7e6e5f38fe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 09:35:29 -0400 Subject: [PATCH 028/108] docs: [google-shopping-css] update `Certification` field descriptions (#13027) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 665432329 Source-Link: https://github.com/googleapis/googleapis/commit/59883000df20846cf207008980d89ac35be91fb2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bc2954fd1ed5de040517ee07fb84d925cafb1b52 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLWNzcy8uT3dsQm90LnlhbWwiLCJoIjoiYmMyOTU0ZmQxZWQ1ZGUwNDA1MTdlZTA3ZmI4NGQ5MjVjYWZiMWI1MiJ9 --------- Co-authored-by: Owl Bot --- .../css_v1/types/css_product_common.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py b/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py index cec3dd7281db..ca3f7734688f 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py +++ b/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py @@ -494,15 +494,27 @@ class Attributes(proto.Message): class Certification(proto.Message): - r"""The certification for the product. + r"""The certification for the product. Use the this attribute to + describe certifications, such as energy efficiency ratings, + associated with a product. Attributes: name (str): - Name of the certification. + The name of the certification. At this time, + the most common value is "EPREL", which + represents energy efficiency certifications in + the EU European Registry for Energy Labeling + (EPREL) database. authority (str): - Name of the certification body. + The authority or certification body responsible for issuing + the certification. At this time, the most common value is + "EC" or “European_Commission” for energy labels in the EU. code (str): - A unique code to identify the certification. + The code of the certification. For example, + for the EPREL certificate with the link + https://eprel.ec.europa.eu/screen/product/dishwashers2019/123456 + the code is 123456. The code is required for + European Energy Labels. """ name: str = proto.Field( From 0b90478bb70be96b304397fc433d2dbaf0160d30 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 09:37:19 -0400 Subject: [PATCH 029/108] feat: [google-cloud-recaptcha-enterprise] add AddIpOverride RPC (#13026) - [ ] Regenerate this pull request now. 
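For the google-shopping-css change above, a small sketch (not part of this patch) built from the example values in the new field docs, assuming ``Certification`` is re-exported at the ``css_v1`` package level as is usual for these generated clients.

    from google.shopping import css_v1

    certification = css_v1.Certification(
        name="EPREL",       # EU energy efficiency registry (EPREL) certification
        authority="EC",     # issuing authority, the European Commission
        code="123456",      # code taken from the EPREL product URL
    )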
BEGIN_COMMIT_OVERRIDE feat: add AddIpOverride RPC feat: add `express_settings` to `Key` docs: clarify `Event.express` field docs: fix billing, quota, and usecase links END_COMMIT_OVERRIDE PiperOrigin-RevId: 665409132 Source-Link: https://github.com/googleapis/googleapis/commit/cc4e33a26333b45f5b9ef15d9286b0aaadb320ea Source-Link: https://github.com/googleapis/googleapis-gen/commit/2be7002e454160c4c43bcaa708fff3bd03ed38cf Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlY2FwdGNoYS1lbnRlcnByaXNlLy5Pd2xCb3QueWFtbCIsImgiOiIyYmU3MDAyZTQ1NDE2MGM0YzQzYmNhYTcwOGZmZjNiZDAzZWQzOGNmIn0= --------- Co-authored-by: Owl Bot --- .../cloud/recaptchaenterprise/__init__.py | 8 + .../cloud/recaptchaenterprise_v1/__init__.py | 8 + .../gapic_metadata.json | 10 + .../async_client.py | 123 ++++++ .../recaptcha_enterprise_service/client.py | 120 ++++++ .../transports/base.py | 17 + .../transports/grpc.py | 33 ++ .../transports/grpc_asyncio.py | 38 ++ .../recaptchaenterprise_v1/types/__init__.py | 8 + .../types/recaptchaenterprise.py | 109 +++++- ...nterprise_service_add_ip_override_async.py | 57 +++ ...enterprise_service_add_ip_override_sync.py | 57 +++ ...a_google.cloud.recaptchaenterprise.v1.json | 169 ++++++++ .../fixup_recaptchaenterprise_v1_keywords.py | 1 + .../test_recaptcha_enterprise_service.py | 369 ++++++++++++++++++ 15 files changed, 1118 insertions(+), 9 deletions(-) create mode 100644 packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py create mode 100644 packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py index f01824e24d20..a4192178f092 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py @@ -27,6 +27,8 @@ from google.cloud.recaptchaenterprise_v1.types.recaptchaenterprise import ( AccountDefenderAssessment, AccountVerificationInfo, + AddIpOverrideRequest, + AddIpOverrideResponse, AndroidKeySettings, AnnotateAssessmentRequest, AnnotateAssessmentResponse, @@ -40,6 +42,7 @@ DeleteKeyRequest, EndpointVerificationInfo, Event, + ExpressKeySettings, FirewallAction, FirewallPolicy, FirewallPolicyAssessment, @@ -49,6 +52,7 @@ GetKeyRequest, GetMetricsRequest, IOSKeySettings, + IpOverrideData, Key, ListFirewallPoliciesRequest, ListFirewallPoliciesResponse, @@ -91,6 +95,8 @@ "RecaptchaEnterpriseServiceAsyncClient", "AccountDefenderAssessment", "AccountVerificationInfo", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "AndroidKeySettings", "AnnotateAssessmentRequest", "AnnotateAssessmentResponse", @@ -104,6 +110,7 @@ "DeleteKeyRequest", "EndpointVerificationInfo", "Event", + "ExpressKeySettings", "FirewallAction", "FirewallPolicy", "FirewallPolicyAssessment", @@ -113,6 +120,7 @@ "GetKeyRequest", "GetMetricsRequest", "IOSKeySettings", + "IpOverrideData", "Key", "ListFirewallPoliciesRequest", "ListFirewallPoliciesResponse", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py index 32e4af3d140c..f71b86ba1dc9 100644 --- 
a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py @@ -25,6 +25,8 @@ from .types.recaptchaenterprise import ( AccountDefenderAssessment, AccountVerificationInfo, + AddIpOverrideRequest, + AddIpOverrideResponse, AndroidKeySettings, AnnotateAssessmentRequest, AnnotateAssessmentResponse, @@ -38,6 +40,7 @@ DeleteKeyRequest, EndpointVerificationInfo, Event, + ExpressKeySettings, FirewallAction, FirewallPolicy, FirewallPolicyAssessment, @@ -47,6 +50,7 @@ GetKeyRequest, GetMetricsRequest, IOSKeySettings, + IpOverrideData, Key, ListFirewallPoliciesRequest, ListFirewallPoliciesResponse, @@ -88,6 +92,8 @@ "RecaptchaEnterpriseServiceAsyncClient", "AccountDefenderAssessment", "AccountVerificationInfo", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "AndroidKeySettings", "AnnotateAssessmentRequest", "AnnotateAssessmentResponse", @@ -101,6 +107,7 @@ "DeleteKeyRequest", "EndpointVerificationInfo", "Event", + "ExpressKeySettings", "FirewallAction", "FirewallPolicy", "FirewallPolicyAssessment", @@ -110,6 +117,7 @@ "GetKeyRequest", "GetMetricsRequest", "IOSKeySettings", + "IpOverrideData", "Key", "ListFirewallPoliciesRequest", "ListFirewallPoliciesResponse", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json index dda75c44f68f..8722f66f3086 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "RecaptchaEnterpriseServiceClient", "rpcs": { + "AddIpOverride": { + "methods": [ + "add_ip_override" + ] + }, "AnnotateAssessment": { "methods": [ "annotate_assessment" @@ -110,6 +115,11 @@ "grpc-async": { "libraryClient": "RecaptchaEnterpriseServiceAsyncClient", "rpcs": { + "AddIpOverride": { + "methods": [ + "add_ip_override" + ] + }, "AnnotateAssessment": { "methods": [ "annotate_assessment" diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py index ca4ff9f14e83..8241b218d463 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py @@ -1302,6 +1302,129 @@ async def sample_migrate_key(): # Done; return the response. return response + async def add_ip_override( + self, + request: Optional[Union[recaptchaenterprise.AddIpOverrideRequest, dict]] = None, + *, + name: Optional[str] = None, + ip_override_data: Optional[recaptchaenterprise.IpOverrideData] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> recaptchaenterprise.AddIpOverrideResponse: + r"""Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. 
+ - For any conflict (such as IP already exists or IP part of an + existing IP range), an error will be returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import recaptchaenterprise_v1 + + async def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = await client.add_ip_override(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest, dict]]): + The request object. The AddIpOverride request message. + name (:class:`str`): + Required. The name of the key to which the IP override + is added, in the format + ``projects/{project}/keys/{key}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ip_override_data (:class:`google.cloud.recaptchaenterprise_v1.types.IpOverrideData`): + Required. IP override added to the + key. + + This corresponds to the ``ip_override_data`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse: + Response for AddIpOverride. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, ip_override_data]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, recaptchaenterprise.AddIpOverrideRequest): + request = recaptchaenterprise.AddIpOverrideRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if ip_override_data is not None: + request.ip_override_data = ip_override_data + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.add_ip_override + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_metrics( self, request: Optional[Union[recaptchaenterprise.GetMetricsRequest, dict]] = None, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py index e26f3ad6450c..3369fc67c142 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py @@ -1765,6 +1765,126 @@ def sample_migrate_key(): # Done; return the response. return response + def add_ip_override( + self, + request: Optional[Union[recaptchaenterprise.AddIpOverrideRequest, dict]] = None, + *, + name: Optional[str] = None, + ip_override_data: Optional[recaptchaenterprise.IpOverrideData] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> recaptchaenterprise.AddIpOverrideResponse: + r"""Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. + - For any conflict (such as IP already exists or IP part of an + existing IP range), an error will be returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import recaptchaenterprise_v1 + + def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = client.add_ip_override(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest, dict]): + The request object. The AddIpOverride request message. + name (str): + Required. The name of the key to which the IP override + is added, in the format + ``projects/{project}/keys/{key}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ip_override_data (google.cloud.recaptchaenterprise_v1.types.IpOverrideData): + Required. IP override added to the + key. + + This corresponds to the ``ip_override_data`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse: + Response for AddIpOverride. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, ip_override_data]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, recaptchaenterprise.AddIpOverrideRequest): + request = recaptchaenterprise.AddIpOverrideRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if ip_override_data is not None: + request.ip_override_data = ip_override_data + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_ip_override] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def get_metrics( self, request: Optional[Union[recaptchaenterprise.GetMetricsRequest, dict]] = None, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py index 9d55defeb4e0..1d997dedd2b7 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py @@ -174,6 +174,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.add_ip_override: gapic_v1.method.wrap_method( + self.add_ip_override, + default_timeout=None, + client_info=client_info, + ), self.get_metrics: gapic_v1.method.wrap_method( self.get_metrics, default_timeout=None, @@ -327,6 +332,18 @@ def migrate_key( ]: raise NotImplementedError() + @property + def add_ip_override( + self, + ) -> Callable[ + [recaptchaenterprise.AddIpOverrideRequest], + Union[ + recaptchaenterprise.AddIpOverrideResponse, + Awaitable[recaptchaenterprise.AddIpOverrideResponse], + ], + ]: + raise NotImplementedError() + @property def get_metrics( self, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py index 7f19869103b9..1e960bab7e13 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py @@ -491,6 +491,39 @@ def migrate_key( ) return self._stubs["migrate_key"] + @property + def add_ip_override( + self, + ) -> Callable[ + [recaptchaenterprise.AddIpOverrideRequest], + recaptchaenterprise.AddIpOverrideResponse, + ]: + r"""Return a callable for the add ip override method over gRPC. + + Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. + - For any conflict (such as IP already exists or IP part of an + existing IP range), an error will be returned. + + Returns: + Callable[[~.AddIpOverrideRequest], + ~.AddIpOverrideResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "add_ip_override" not in self._stubs: + self._stubs["add_ip_override"] = self.grpc_channel.unary_unary( + "/google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService/AddIpOverride", + request_serializer=recaptchaenterprise.AddIpOverrideRequest.serialize, + response_deserializer=recaptchaenterprise.AddIpOverrideResponse.deserialize, + ) + return self._stubs["add_ip_override"] + @property def get_metrics( self, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py index ef36498d9935..d500b28f4a20 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py @@ -507,6 +507,39 @@ def migrate_key( ) return self._stubs["migrate_key"] + @property + def add_ip_override( + self, + ) -> Callable[ + [recaptchaenterprise.AddIpOverrideRequest], + Awaitable[recaptchaenterprise.AddIpOverrideResponse], + ]: + r"""Return a callable for the add ip override method over gRPC. + + Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. + - For any conflict (such as IP already exists or IP part of an + existing IP range), an error will be returned. + + Returns: + Callable[[~.AddIpOverrideRequest], + Awaitable[~.AddIpOverrideResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "add_ip_override" not in self._stubs: + self._stubs["add_ip_override"] = self.grpc_channel.unary_unary( + "/google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService/AddIpOverride", + request_serializer=recaptchaenterprise.AddIpOverrideRequest.serialize, + response_deserializer=recaptchaenterprise.AddIpOverrideResponse.deserialize, + ) + return self._stubs["add_ip_override"] + @property def get_metrics( self, @@ -853,6 +886,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.add_ip_override: gapic_v1.method_async.wrap_method( + self.add_ip_override, + default_timeout=None, + client_info=client_info, + ), self.get_metrics: gapic_v1.method_async.wrap_method( self.get_metrics, default_timeout=None, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py index c035b7638c0e..77aa68a2862c 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py @@ -16,6 +16,8 @@ from .recaptchaenterprise import ( AccountDefenderAssessment, AccountVerificationInfo, + AddIpOverrideRequest, + AddIpOverrideResponse, AndroidKeySettings, AnnotateAssessmentRequest, AnnotateAssessmentResponse, @@ -29,6 +31,7 @@ DeleteKeyRequest, EndpointVerificationInfo, Event, + ExpressKeySettings, FirewallAction, FirewallPolicy, FirewallPolicyAssessment, @@ -38,6 +41,7 @@ GetKeyRequest, GetMetricsRequest, IOSKeySettings, + IpOverrideData, Key, ListFirewallPoliciesRequest, ListFirewallPoliciesResponse, @@ -78,6 +82,8 @@ __all__ = ( "AccountDefenderAssessment", "AccountVerificationInfo", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "AndroidKeySettings", "AnnotateAssessmentRequest", "AnnotateAssessmentResponse", @@ -91,6 +97,7 @@ "DeleteKeyRequest", "EndpointVerificationInfo", "Event", + "ExpressKeySettings", "FirewallAction", "FirewallPolicy", "FirewallPolicyAssessment", @@ -100,6 +107,7 @@ "GetKeyRequest", "GetMetricsRequest", "IOSKeySettings", + "IpOverrideData", "Key", "ListFirewallPoliciesRequest", "ListFirewallPoliciesResponse", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py index 8f4b9a655592..c4612c09acf2 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py @@ -68,6 +68,7 @@ "WebKeySettings", "AndroidKeySettings", "IOSKeySettings", + "ExpressKeySettings", "AppleDeveloperId", "ScoreDistribution", "ScoreMetrics", @@ -81,9 +82,12 @@ "ListRelatedAccountGroupsResponse", "SearchRelatedAccountGroupMembershipsRequest", "SearchRelatedAccountGroupMembershipsResponse", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "RelatedAccountGroupMembership", "RelatedAccountGroup", "WafSettings", + "IpOverrideData", }, ) @@ -771,7 +775,7 @@ class Event(proto.Message): express (bool): Optional. Flag for a reCAPTCHA express request for an assessment without a token. If enabled, ``site_key`` must - reference a SCORE key with WAF feature set to EXPRESS. + reference an Express site key. 
requested_uri (str): Optional. The URI resource the user requested that triggered an assessment. @@ -2043,12 +2047,12 @@ class MigrateKeyRequest(proto.Message): Enterprise key or migrated key behaves differently than a reCAPTCHA (non-Enterprise version) key when you reach a quota limit (see - https://cloud.google.com/recaptcha-enterprise/quotas#quota_limit). - To avoid any disruption of your usage, we check that a - billing account is present. If your usage of reCAPTCHA is - under the free quota, you can safely skip the billing check - and proceed with the migration. See - https://cloud.google.com/recaptcha-enterprise/docs/billing-information. + https://cloud.google.com/recaptcha/quotas#quota_limit). To + avoid any disruption of your usage, we check that a billing + account is present. If your usage of reCAPTCHA is under the + free quota, you can safely skip the billing check and + proceed with the migration. See + https://cloud.google.com/recaptcha/docs/billing-information. """ name: str = proto.Field( @@ -2169,10 +2173,15 @@ class Key(proto.Message): Settings for keys that can be used by iOS apps. + This field is a member of `oneof`_ ``platform_settings``. + express_settings (google.cloud.recaptchaenterprise_v1.types.ExpressKeySettings): + Settings for keys that can be used by + reCAPTCHA Express. + This field is a member of `oneof`_ ``platform_settings``. labels (MutableMapping[str, str]): Optional. See [Creating and managing labels] - (https://cloud.google.com/recaptcha-enterprise/docs/labels). + (https://cloud.google.com/recaptcha/docs/labels). create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The timestamp corresponding to the creation of this key. @@ -2209,6 +2218,12 @@ class Key(proto.Message): oneof="platform_settings", message="IOSKeySettings", ) + express_settings: "ExpressKeySettings" = proto.Field( + proto.MESSAGE, + number=11, + oneof="platform_settings", + message="ExpressKeySettings", + ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -2449,6 +2464,13 @@ class IOSKeySettings(proto.Message): ) +class ExpressKeySettings(proto.Message): + r"""Settings specific to keys that can be used for reCAPTCHA + Express. + + """ + + class AppleDeveloperId(proto.Message): r"""Contains fields that are required to perform Apple-specific integrity checks. @@ -3032,6 +3054,32 @@ def raw_page(self): ) +class AddIpOverrideRequest(proto.Message): + r"""The AddIpOverride request message. + + Attributes: + name (str): + Required. The name of the key to which the IP override is + added, in the format ``projects/{project}/keys/{key}``. + ip_override_data (google.cloud.recaptchaenterprise_v1.types.IpOverrideData): + Required. IP override added to the key. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ip_override_data: "IpOverrideData" = proto.Field( + proto.MESSAGE, + number=2, + message="IpOverrideData", + ) + + +class AddIpOverrideResponse(proto.Message): + r"""Response for AddIpOverride.""" + + class RelatedAccountGroupMembership(proto.Message): r"""A membership in a group of related accounts. @@ -3097,7 +3145,7 @@ class WafSettings(proto.Message): class WafFeature(proto.Enum): r"""Supported WAF features. For more information, see - https://cloud.google.com/recaptcha-enterprise/docs/usecase#comparison_of_features. + https://cloud.google.com/recaptcha/docs/usecase#comparison_of_features. 
Values: WAF_FEATURE_UNSPECIFIED (0): @@ -3151,4 +3199,47 @@ class WafService(proto.Enum): ) +class IpOverrideData(proto.Message): + r"""Information about the IP or IP range override. + + Attributes: + ip (str): + Required. The IP address to override (can be + IPv4, IPv6 or CIDR). The IP override must be a + valid IPv4 or IPv6 address, or a CIDR range. The + IP override must be a public IP address. + Example of IPv4: 168.192.5.6 + Example of IPv6: + 2001:0000:130F:0000:0000:09C0:876A:130B Example + of IPv4 with CIDR: 168.192.5.0/24 + Example of IPv6 with CIDR: 2001:0DB8:1234::/48 + override_type (google.cloud.recaptchaenterprise_v1.types.IpOverrideData.OverrideType): + Required. Describes the type of IP override. + """ + + class OverrideType(proto.Enum): + r"""Enum that represents the type of IP override. + + Values: + OVERRIDE_TYPE_UNSPECIFIED (0): + Default override type that indicates this + enum hasn't been specified. + ALLOW (1): + Allowlist the IP address; i.e. give a + ``risk_analysis.score`` of 0.9 for all valid assessments. + """ + OVERRIDE_TYPE_UNSPECIFIED = 0 + ALLOW = 1 + + ip: str = proto.Field( + proto.STRING, + number=1, + ) + override_type: OverrideType = proto.Field( + proto.ENUM, + number=3, + enum=OverrideType, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py new file mode 100644 index 000000000000..0431a12687a1 --- /dev/null +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddIpOverride +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-recaptcha-enterprise + + +# [START recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import recaptchaenterprise_v1 + + +async def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = await client.add_ip_override(request=request) + + # Handle the response + print(response) + +# [END recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_async] diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py new file mode 100644 index 000000000000..f5727a9c8ee8 --- /dev/null +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddIpOverride +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-recaptcha-enterprise + + +# [START recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import recaptchaenterprise_v1 + + +def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = client.add_ip_override(request=request) + + # Handle the response + print(response) + +# [END recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_sync] diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index aa554428ea59..b2ad3f987f98 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -11,6 +11,175 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient", + "shortName": "RecaptchaEnterpriseServiceAsyncClient" + }, + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient.add_ip_override", + "method": { + "fullName": "google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService.AddIpOverride", + "service": { + "fullName": "google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService", + "shortName": "RecaptchaEnterpriseService" + }, + "shortName": "AddIpOverride" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "ip_override_data", + "type": "google.cloud.recaptchaenterprise_v1.types.IpOverrideData" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse", + "shortName": "add_ip_override" + }, + "description": "Sample for AddIpOverride", + "file": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient", + "shortName": "RecaptchaEnterpriseServiceClient" + }, + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient.add_ip_override", + "method": { + "fullName": "google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService.AddIpOverride", + "service": { + "fullName": "google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService", + "shortName": "RecaptchaEnterpriseService" + }, + "shortName": "AddIpOverride" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "ip_override_data", + "type": "google.cloud.recaptchaenterprise_v1.types.IpOverrideData" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse", + "shortName": "add_ip_override" + }, + "description": "Sample for AddIpOverride", + "file": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py b/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py index 59c889be9ff2..36e16174f78a 100644 --- a/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py +++ b/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py @@ -39,6 +39,7 @@ def partition( class recaptchaenterpriseCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'add_ip_override': ('name', 'ip_override_data', ), 'annotate_assessment': ('name', 'annotation', 'reasons', 'account_id', 'hashed_account_id', 'transaction_event', ), 'create_assessment': ('parent', 'assessment', ), 'create_firewall_policy': ('parent', 'firewall_policy', ), diff --git a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py index 2134315d5a12..9230f6fa88c3 100644 --- a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py +++ b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py @@ -4646,6 +4646,374 @@ async def test_migrate_key_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + 
"request_type", + [ + recaptchaenterprise.AddIpOverrideRequest, + dict, + ], +) +def test_add_ip_override(request_type, transport: str = "grpc"): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = recaptchaenterprise.AddIpOverrideResponse() + response = client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = recaptchaenterprise.AddIpOverrideRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, recaptchaenterprise.AddIpOverrideResponse) + + +def test_add_ip_override_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.add_ip_override() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == recaptchaenterprise.AddIpOverrideRequest() + + +def test_add_ip_override_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = recaptchaenterprise.AddIpOverrideRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.add_ip_override(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == recaptchaenterprise.AddIpOverrideRequest( + name="name_value", + ) + + +def test_add_ip_override_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_ip_override in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_ip_override] = mock_rpc + request = {} + client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.add_ip_override(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_add_ip_override_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + response = await client.add_ip_override() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == recaptchaenterprise.AddIpOverrideRequest() + + +@pytest.mark.asyncio +async def test_add_ip_override_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.add_ip_override + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.add_ip_override + ] = mock_rpc + + request = {} + await client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.add_ip_override(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_add_ip_override_async( + transport: str = "grpc_asyncio", + request_type=recaptchaenterprise.AddIpOverrideRequest, +): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + response = await client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = recaptchaenterprise.AddIpOverrideRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, recaptchaenterprise.AddIpOverrideResponse) + + +@pytest.mark.asyncio +async def test_add_ip_override_async_from_dict(): + await test_add_ip_override_async(request_type=dict) + + +def test_add_ip_override_field_headers(): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = recaptchaenterprise.AddIpOverrideRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value = recaptchaenterprise.AddIpOverrideResponse() + client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_add_ip_override_field_headers_async(): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = recaptchaenterprise.AddIpOverrideRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + await client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_add_ip_override_flattened(): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = recaptchaenterprise.AddIpOverrideResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.add_ip_override( + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].ip_override_data + mock_val = recaptchaenterprise.IpOverrideData(ip="ip_value") + assert arg == mock_val + + +def test_add_ip_override_flattened_error(): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_ip_override( + recaptchaenterprise.AddIpOverrideRequest(), + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + +@pytest.mark.asyncio +async def test_add_ip_override_flattened_async(): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = recaptchaenterprise.AddIpOverrideResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.add_ip_override( + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].ip_override_data + mock_val = recaptchaenterprise.IpOverrideData(ip="ip_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_add_ip_override_flattened_error_async(): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.add_ip_override( + recaptchaenterprise.AddIpOverrideRequest(), + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9582,6 +9950,7 @@ def test_recaptcha_enterprise_service_base_transport(): "update_key", "delete_key", "migrate_key", + "add_ip_override", "get_metrics", "create_firewall_policy", "list_firewall_policies", From 6639798f019e86e72ce6cd5a2c837320439cb2b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 14:23:21 -0400 Subject: [PATCH 030/108] feat!: [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service (#13028) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 666340312 Source-Link: https://github.com/googleapis/googleapis/commit/82ab100ce17cd13635cdfb719a894821898221ec Source-Link: https://github.com/googleapis/googleapis-gen/commit/3f5c714aa4faaa7350f1145242f1927049bdd9a8 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1jb25uZWN0LWdhdGV3YXkvLk93bEJvdC55YW1sIiwiaCI6IjNmNWM3MTRhYTRmYWFhNzM1MGYxMTQ1MjQyZjE5MjcwNDliZGQ5YTgifQ== BEGIN_COMMIT_OVERRIDE fix!: Set google.cloud.gkeconnect.gateway_v1 as the default import feat!: [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service BREAKING CHANGE: existing client libraries are being regenerated to remove unused functionality and introduce new features. END_COMMIT_OVERRIDE BEGIN_NESTED_COMMIT feat!: [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service BREAKING CHANGE: existing client libraries are being regenerated to remove unused functionality and introduce new features. PiperOrigin-RevId: 665564055 Source-Link: https://github.com/googleapis/googleapis/commit/71434fd174dbdf4714e82801501cd78db4897473 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3fcf6a6943a930cba9090f8b88a6caf71c8fb694 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1jb25uZWN0LWdhdGV3YXkvLk93bEJvdC55YW1sIiwiaCI6IjNmY2Y2YTY5NDNhOTMwY2JhOTA5MGY4Yjg4YTZjYWY3MWM4ZmI2OTQifQ== END_NESTED_COMMIT BEGIN_NESTED_COMMIT feat!: [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service BREAKING CHANGE: existing client libraries are being regenerated to remove unused functionality and introduce new features. 
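For callers migrating off the removed ``GatewayService``, the sketch below is a minimal, non-authoritative illustration of the new default surface under ``gateway_v1``; the ``GenerateCredentialsRequest.name`` field and the membership path format are assumptions, since this diff only shows the re-exported names and the renamed ``generate_credentials`` samples.

.. code-block:: python

    # Minimal migration sketch (assumptions noted inline): GatewayControlClient
    # replaces the removed GatewayServiceClient, and gateway_v1 is now the
    # default import version.
    from google.cloud.gkeconnect import gateway_v1

    client = gateway_v1.GatewayControlClient()

    # Assumption: GenerateCredentialsRequest takes the membership resource
    # name; the full field set lives in gateway_v1/types/control.py.
    request = gateway_v1.GenerateCredentialsRequest(
        name="projects/PROJECT/locations/LOCATION/memberships/MEMBERSHIP",
    )

    # Returns a GenerateCredentialsResponse; see the generated
    # connectgateway_v1_generated_gateway_control_generate_credentials_*.py
    # samples for the canonical form.
    response = client.generate_credentials(request=request)
    print(response)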
PiperOrigin-RevId: 665485495 Source-Link: https://github.com/googleapis/googleapis/commit/5e76753c1855b907a7c452cdfe90921cbcc305c3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/565ebdf8b33ceeaaed972e0d9fbe4aa063e5292e Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1jb25uZWN0LWdhdGV3YXkvLk93bEJvdC55YW1sIiwiaCI6IjU2NWViZGY4YjMzY2VlYWFlZDk3MmUwZDlmYmU0YWEwNjNlNTI5MmUifQ== END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.repo-metadata.json | 2 +- .../docs/gateway_v1/gateway_control.rst | 6 + .../docs/gateway_v1/services_.rst | 6 + .../docs/gateway_v1/types_.rst | 6 + ...ateway_service.rst => gateway_control.rst} | 4 +- .../docs/gateway_v1beta1/services_.rst | 2 +- .../docs/index.rst | 11 + .../cloud/gkeconnect/gateway/__init__.py | 18 +- .../cloud/gkeconnect/gateway_v1/__init__.py | 29 + .../gkeconnect/gateway_v1/gapic_metadata.json | 43 + .../gkeconnect/gateway_v1/gapic_version.py | 16 + .../cloud/gkeconnect/gateway_v1/py.typed | 2 + .../gateway_v1/services/__init__.py | 15 + .../services/gateway_control}/__init__.py | 8 +- .../services/gateway_control/async_client.py | 352 +++ .../services/gateway_control/client.py | 760 ++++++ .../gateway_control/transports/__init__.py | 36 + .../gateway_control/transports/base.py | 173 ++ .../gateway_control/transports/grpc.py | 275 +++ .../transports/grpc_asyncio.py | 295 +++ .../gateway_control/transports/rest.py | 308 +++ .../types/__init__.py} | 11 +- .../gkeconnect/gateway_v1/types/control.py | 123 + .../gkeconnect/gateway_v1beta1/__init__.py | 9 +- .../gateway_v1beta1/gapic_metadata.json | 54 +- .../services/gateway_control/__init__.py | 22 + .../services/gateway_control/async_client.py | 352 +++ .../services/gateway_control/client.py | 758 ++++++ .../transports/__init__.py | 18 +- .../transports/base.py | 88 +- .../transports/grpc.py | 149 +- .../transports/grpc_asyncio.py | 193 +- .../services/gateway_service/async_client.py | 1133 --------- .../services/gateway_service/client.py | 1536 ------------ .../gateway_v1beta1/types/__init__.py | 6 +- .../gateway_v1beta1/types/control.py | 123 + ...way_control_generate_credentials_async.py} | 18 +- ...eway_control_generate_credentials_sync.py} | 18 +- ...way_control_generate_credentials_async.py} | 16 +- ...eway_control_generate_credentials_sync.py} | 16 +- ...ed_gateway_service_delete_resource_sync.py | 52 - ...ated_gateway_service_get_resource_async.py | 52 - ...ted_gateway_service_post_resource_async.py | 52 - ...ated_gateway_service_post_resource_sync.py | 52 - ...ated_gateway_service_put_resource_async.py | 52 - ...rated_gateway_service_put_resource_sync.py | 52 - ...ta_google.cloud.gkeconnect.gateway.v1.json | 168 ++ ...ogle.cloud.gkeconnect.gateway.v1beta1.json | 680 +----- .../scripts/fixup_gateway_v1_keywords.py | 176 ++ .../scripts/fixup_gateway_v1beta1_keywords.py | 6 +- .../tests/unit/gapic/gateway_v1/__init__.py | 15 + .../test_gateway_control.py} | 1844 +++++---------- .../gateway_v1beta1/test_gateway_control.py | 2090 +++++++++++++++++ 53 files changed, 6989 insertions(+), 5312 deletions(-) create mode 100644 packages/google-cloud-gke-connect-gateway/docs/gateway_v1/gateway_control.rst create mode 100644 packages/google-cloud-gke-connect-gateway/docs/gateway_v1/services_.rst create mode 100644 packages/google-cloud-gke-connect-gateway/docs/gateway_v1/types_.rst rename packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/{gateway_service.rst => gateway_control.rst} (80%) create mode 100644 
packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/__init__.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_metadata.json create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/py.typed create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/__init__.py rename packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/{gateway_v1beta1/services/gateway_service => gateway_v1/services/gateway_control}/__init__.py (79%) create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/async_client.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/__init__.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/base.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py rename packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/{gateway_v1beta1/types/gateway.py => gateway_v1/types/__init__.py} (77%) create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/control.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/__init__.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/async_client.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py rename packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/{gateway_service => gateway_control}/transports/__init__.py (66%) rename packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/{gateway_service => gateway_control}/transports/base.py (73%) rename packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/{gateway_service => gateway_control}/transports/grpc.py (66%) rename packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/{gateway_service => gateway_control}/transports/grpc_asyncio.py (63%) delete mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py delete mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py create mode 100644 packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/control.py rename 
packages/google-cloud-gke-connect-gateway/samples/generated_samples/{connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py => connectgateway_v1_generated_gateway_control_generate_credentials_async.py} (75%) rename packages/google-cloud-gke-connect-gateway/samples/generated_samples/{connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py => connectgateway_v1_generated_gateway_control_generate_credentials_sync.py} (75%) rename packages/google-cloud-gke-connect-gateway/samples/generated_samples/{connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py => connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py} (76%) rename packages/google-cloud-gke-connect-gateway/samples/generated_samples/{connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py => connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py} (77%) delete mode 100644 packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py delete mode 100644 packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_async.py delete mode 100644 packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_async.py delete mode 100644 packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py delete mode 100644 packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_async.py delete mode 100644 packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py create mode 100644 packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json create mode 100644 packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1_keywords.py create mode 100644 packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/__init__.py rename packages/google-cloud-gke-connect-gateway/tests/unit/gapic/{gateway_v1beta1/test_gateway_service.py => gateway_v1/test_gateway_control.py} (56%) create mode 100644 packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py diff --git a/packages/google-cloud-gke-connect-gateway/.repo-metadata.json b/packages/google-cloud-gke-connect-gateway/.repo-metadata.json index a621420cff60..8f664d786f4a 100644 --- a/packages/google-cloud-gke-connect-gateway/.repo-metadata.json +++ b/packages/google-cloud-gke-connect-gateway/.repo-metadata.json @@ -10,7 +10,7 @@ "repo": "googleapis/google-cloud-python", "distribution_name": "google-cloud-gke-connect-gateway", "api_id": "connectgateway.googleapis.com", - "default_version": "v1beta1", + "default_version": "v1", "codeowner_team": "", "api_shortname": "connectgateway", "api_description": "builds on the power of fleets to let Anthos users connect to and run commands against registered Anthos clusters in a simple, consistent, and secured way, whether the clusters are on Google Cloud, other public clouds, or on premises, and makes it easier to automate DevOps processes across all your clusters." 
diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/gateway_control.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/gateway_control.rst new file mode 100644 index 000000000000..5f926c0f8738 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/gateway_control.rst @@ -0,0 +1,6 @@ +GatewayControl +-------------------------------- + +.. automodule:: google.cloud.gkeconnect.gateway_v1.services.gateway_control + :members: + :inherited-members: diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/services_.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/services_.rst new file mode 100644 index 000000000000..3a27320049ff --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Gkeconnect Gateway v1 API +=================================================== +.. toctree:: + :maxdepth: 2 + + gateway_control diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/types_.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/types_.rst new file mode 100644 index 000000000000..e043fb7f8f73 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Gkeconnect Gateway v1 API +================================================ + +.. automodule:: google.cloud.gkeconnect.gateway_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_service.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_control.rst similarity index 80% rename from packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_service.rst rename to packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_control.rst index 7dffb5a4dc24..7091b0d7e026 100644 --- a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_service.rst +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_control.rst @@ -1,6 +1,6 @@ -GatewayService +GatewayControl -------------------------------- -.. automodule:: google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service +.. automodule:: google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control :members: :inherited-members: diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst index c9c94b3690bc..98a39ee54b47 100644 --- a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst @@ -3,4 +3,4 @@ Services for Google Cloud Gkeconnect Gateway v1beta1 API .. toctree:: :maxdepth: 2 - gateway_service + gateway_control diff --git a/packages/google-cloud-gke-connect-gateway/docs/index.rst b/packages/google-cloud-gke-connect-gateway/docs/index.rst index 16bff39fc480..901b79a633a2 100644 --- a/packages/google-cloud-gke-connect-gateway/docs/index.rst +++ b/packages/google-cloud-gke-connect-gateway/docs/index.rst @@ -2,6 +2,17 @@ .. include:: multiprocessing.rst +This package includes clients for multiple versions of GKE Connect Gateway. +By default, you will get version ``gateway_v1``. + + +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + gateway_v1/services_ + gateway_v1/types_ API Reference ------------- diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py index 4ec6e91d9d73..f15cecdbcbb1 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py @@ -18,14 +18,20 @@ __version__ = package_version.__version__ -from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.async_client import ( - GatewayServiceAsyncClient, +from google.cloud.gkeconnect.gateway_v1.services.gateway_control.async_client import ( + GatewayControlAsyncClient, ) -from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.client import ( - GatewayServiceClient, +from google.cloud.gkeconnect.gateway_v1.services.gateway_control.client import ( + GatewayControlClient, +) +from google.cloud.gkeconnect.gateway_v1.types.control import ( + GenerateCredentialsRequest, + GenerateCredentialsResponse, ) __all__ = ( - "GatewayServiceClient", - "GatewayServiceAsyncClient", + "GatewayControlClient", + "GatewayControlAsyncClient", + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/__init__.py new file mode 100644 index 000000000000..31f3cf855bef --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/__init__.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.gateway_control import GatewayControlAsyncClient, GatewayControlClient +from .types.control import GenerateCredentialsRequest, GenerateCredentialsResponse + +__all__ = ( + "GatewayControlAsyncClient", + "GatewayControlClient", + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_metadata.json b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_metadata.json new file mode 100644 index 000000000000..5d08e8325089 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.gkeconnect.gateway_v1", + "protoPackage": "google.cloud.gkeconnect.gateway.v1", + "schema": "1.0", + "services": { + "GatewayControl": { + "clients": { + "grpc": { + "libraryClient": "GatewayControlClient", + "rpcs": { + "GenerateCredentials": { + "methods": [ + "generate_credentials" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GatewayControlAsyncClient", + "rpcs": { + "GenerateCredentials": { + "methods": [ + "generate_credentials" + ] + } + } + }, + "rest": { + "libraryClient": "GatewayControlClient", + "rpcs": { + "GenerateCredentials": { + "methods": [ + "generate_credentials" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/py.typed b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/py.typed new file mode 100644 index 000000000000..fc91be3f2256 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-gke-connect-gateway package uses inline types. 
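The ``gapic_metadata.json`` file above maps the single ``GenerateCredentials`` RPC onto three library surfaces: grpc, grpc-asyncio, and rest. As a rough illustration, under the same assumptions as the previous sketch (installed package, default credentials, ``name_value`` as a placeholder membership name), the transport can be chosen when the client is constructed, and the async client can be used as a context manager:

.. code-block:: python

    import asyncio

    from google.cloud.gkeconnect import gateway_v1

    # The REST transport registered in gapic_metadata.json can be requested
    # by name; gRPC is the registry default when no transport is given.
    rest_client = gateway_v1.GatewayControlClient(transport="rest")

    async def main():
        # The grpc-asyncio surface mirrors the sync client; using it as an
        # async context manager closes the transport on exit.
        async with gateway_v1.GatewayControlAsyncClient() as client:
            request = gateway_v1.GenerateCredentialsRequest(name="name_value")
            response = await client.generate_credentials(request=request)
            print(response)

    asyncio.run(main())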
diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/__init__.py similarity index 79% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/__init__.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/__init__.py index 1e7c2740f4c2..6d98a5d3aec9 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .async_client import GatewayServiceAsyncClient -from .client import GatewayServiceClient +from .async_client import GatewayControlAsyncClient +from .client import GatewayControlClient __all__ = ( - "GatewayServiceClient", - "GatewayServiceAsyncClient", + "GatewayControlClient", + "GatewayControlAsyncClient", ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/async_client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/async_client.py new file mode 100644 index 000000000000..1f6ebd6ef20d --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/async_client.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .client import GatewayControlClient +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport + + +class GatewayControlAsyncClient: + """GatewayControl is the control plane API for Connect Gateway.""" + + _client: GatewayControlClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = GatewayControlClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = GatewayControlClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod( + GatewayControlClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GatewayControlClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GatewayControlClient.common_folder_path) + parse_common_folder_path = staticmethod( + GatewayControlClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GatewayControlClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GatewayControlClient.parse_common_organization_path + ) + common_project_path = staticmethod(GatewayControlClient.common_project_path) + parse_common_project_path = staticmethod( + GatewayControlClient.parse_common_project_path + ) + common_location_path = staticmethod(GatewayControlClient.common_location_path) + parse_common_location_path = staticmethod( + GatewayControlClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlAsyncClient: The constructed client. + """ + return GatewayControlClient.from_service_account_info.__func__(GatewayControlAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlAsyncClient: The constructed client. 
+ """ + return GatewayControlClient.from_service_account_file.__func__(GatewayControlAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return GatewayControlClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = GatewayControlClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. 
+ If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GatewayControlClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1 + + async def sample_generate_credentials(): + # Create a client + client = gateway_v1.GatewayControlAsyncClient() + + # Initialize request argument(s) + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest, dict]]): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_credentials + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "GatewayControlAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlAsyncClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py new file mode 100644 index 000000000000..0f9ff2470144 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py @@ -0,0 +1,760 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc import GatewayControlGrpcTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport +from .transports.rest import GatewayControlRestTransport + + +class GatewayControlClientMeta(type): + """Metaclass for the GatewayControl client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GatewayControlTransport]] + _transport_registry["grpc"] = GatewayControlGrpcTransport + _transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport + _transport_registry["rest"] = GatewayControlRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GatewayControlTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GatewayControlClient(metaclass=GatewayControlClientMeta): + """GatewayControl is the control plane API for Connect Gateway.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "connectgateway.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "connectgateway.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + 
"""Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = GatewayControlClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = GatewayControlClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or GatewayControlClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = GatewayControlClient._read_environment_variables() + self._client_cert_source = GatewayControlClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = GatewayControlClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, GatewayControlTransport) + if transport_provided: + # transport is a GatewayControlTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(GatewayControlTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or GatewayControlClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[GatewayControlTransport], Callable[..., GatewayControlTransport] + ] = ( + GatewayControlClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GatewayControlTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1 + + def sample_generate_credentials(): + # Create a client + client = gateway_v1.GatewayControlClient() + + # Initialize request argument(s) + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest, dict]): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_credentials] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GatewayControlClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/__init__.py new file mode 100644 index 000000000000..bc45ac2893ec --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GatewayControlTransport +from .grpc import GatewayControlGrpcTransport +from .grpc_asyncio import GatewayControlGrpcAsyncIOTransport +from .rest import GatewayControlRestInterceptor, GatewayControlRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[GatewayControlTransport]] +_transport_registry["grpc"] = GatewayControlGrpcTransport +_transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport +_transport_registry["rest"] = GatewayControlRestTransport + +__all__ = ( + "GatewayControlTransport", + "GatewayControlGrpcTransport", + "GatewayControlGrpcAsyncIOTransport", + "GatewayControlRestTransport", + "GatewayControlRestInterceptor", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/base.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/base.py new file mode 100644 index 000000000000..0ad0eea11657 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version +from google.cloud.gkeconnect.gateway_v1.types import control + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GatewayControlTransport(abc.ABC): + """Abstract transport class for GatewayControl.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "connectgateway.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_credentials: gapic_v1.method.wrap_method( + self.generate_credentials, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], + Union[ + control.GenerateCredentialsResponse, + Awaitable[control.GenerateCredentialsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GatewayControlTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc.py new file mode 100644 index 000000000000..893617331d3d --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport + + +class GatewayControlGrpcTransport(GatewayControlTransport): + """gRPC backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
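# A minimal, hypothetical usage sketch of the mutual-TLS parameters documented in the
# constructor above. The PEM file names and the callback are placeholders (not part of
# this change), and Application Default Credentials are assumed to be available.
from google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports import (
    GatewayControlGrpcTransport,
)


def load_client_cert():
    # Return (certificate_bytes, private_key_bytes), both PEM-encoded.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()


mtls_transport = GatewayControlGrpcTransport(
    client_cert_source_for_mtls=load_client_cert,
)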
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], control.GenerateCredentialsResponse + ]: + r"""Return a callable for the generate credentials method over gRPC. + + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + Returns: + Callable[[~.GenerateCredentialsRequest], + ~.GenerateCredentialsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, + ) + return self._stubs["generate_credentials"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GatewayControlGrpcTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc_asyncio.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc_asyncio.py new file mode 100644 index 000000000000..8f4182cc9522 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc_asyncio.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
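# A small, hypothetical sketch of the pre-built-channel path described in the gRPC
# transport constructor above: when a ``grpc.Channel`` instance is passed, credentials
# are ignored and the channel is used as-is. The localhost target is a placeholder
# (for example a local fake used in tests), and the ``gateway_v1.GatewayControlClient``
# import is assumed to mirror the v1beta1 exports added later in this change.
import grpc

from google.cloud.gkeconnect import gateway_v1
from google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports import (
    GatewayControlGrpcTransport,
)

channel = grpc.insecure_channel("localhost:8080")
transport = GatewayControlGrpcTransport(channel=channel)
client = gateway_v1.GatewayControlClient(transport=transport)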
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .grpc import GatewayControlGrpcTransport + + +class GatewayControlGrpcAsyncIOTransport(GatewayControlTransport): + """gRPC AsyncIO backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. 
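# A hypothetical sketch of the callable form of ``channel`` accepted by this
# constructor: the factory is invoked with the same arguments as ``create_channel``
# (see the channel initialization further below), so it can adjust options before
# delegating. The keepalive value is a placeholder, and Application Default
# Credentials are assumed to be available.
from google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports import (
    GatewayControlGrpcAsyncIOTransport,
)


def channel_factory(host, **kwargs):
    # Append one extra channel option, then defer to the default factory.
    options = list(kwargs.pop("options", ())) + [("grpc.keepalive_time_ms", 30000)]
    return GatewayControlGrpcAsyncIOTransport.create_channel(
        host, options=options, **kwargs
    )


async_transport = GatewayControlGrpcAsyncIOTransport(channel=channel_factory)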
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], + Awaitable[control.GenerateCredentialsResponse], + ]: + r"""Return a callable for the generate credentials method over gRPC. + + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + Returns: + Callable[[~.GenerateCredentialsRequest], + Awaitable[~.GenerateCredentialsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, + ) + return self._stubs["generate_credentials"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.generate_credentials: gapic_v1.method_async.wrap_method( + self.generate_credentials, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("GatewayControlGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py new file mode 100644 index 000000000000..de0570c0ecdb --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import GatewayControlTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GatewayControlRestInterceptor: + """Interceptor for GatewayControl. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GatewayControlRestTransport. + + .. code-block:: python + class MyCustomGatewayControlInterceptor(GatewayControlRestInterceptor): + def pre_generate_credentials(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_credentials(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GatewayControlRestTransport(interceptor=MyCustomGatewayControlInterceptor()) + client = GatewayControlClient(transport=transport) + + + """ + + def pre_generate_credentials( + self, + request: control.GenerateCredentialsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control.GenerateCredentialsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_credentials + + Override in a subclass to manipulate the request or metadata + before they are sent to the GatewayControl server. + """ + return request, metadata + + def post_generate_credentials( + self, response: control.GenerateCredentialsResponse + ) -> control.GenerateCredentialsResponse: + """Post-rpc interceptor for generate_credentials + + Override in a subclass to manipulate the response + after it is returned by the GatewayControl server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class GatewayControlRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GatewayControlRestInterceptor + + +class GatewayControlRestTransport(GatewayControlTransport): + """REST backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[GatewayControlRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GatewayControlRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GenerateCredentials(GatewayControlRestStub): + def __hash__(self): + return hash("GenerateCredentials") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control.GenerateCredentialsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""Call the generate credentials method over HTTP. + + Args: + request (~.control.GenerateCredentialsRequest): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control.GenerateCredentialsResponse: + Connection information for a + particular membership. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/memberships/*}:generateCredentials", + }, + ] + request, metadata = self._interceptor.pre_generate_credentials( + request, metadata + ) + pb_request = control.GenerateCredentialsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control.GenerateCredentialsResponse() + pb_resp = control.GenerateCredentialsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_credentials(resp) + return resp + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], control.GenerateCredentialsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateCredentials(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GatewayControlRestTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/__init__.py similarity index 77% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/__init__.py index ca8527fb89ec..2bfb31823e04 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/__init__.py @@ -13,12 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import proto # type: ignore +from .control import GenerateCredentialsRequest, GenerateCredentialsResponse -__protobuf__ = proto.module( - package="google.cloud.gkeconnect.gateway.v1beta1", - manifest={}, +__all__ = ( + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/control.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/control.py new file mode 100644 index 000000000000..71e358d796f7 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/control.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.gkeconnect.gateway.v1", + manifest={ + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", + }, +) + + +class GenerateCredentialsRequest(proto.Message): + r"""A request for connection information for a particular + membership. + + Attributes: + name (str): + Required. The Fleet membership resource. + force_use_agent (bool): + Optional. Whether to force the use of Connect + Agent-based transport. + This will return a configuration that uses + Connect Agent as the underlying transport + mechanism for cluster types that would otherwise + have used a different transport. Requires that + Connect Agent be installed on the cluster. + Setting this field to false is equivalent to not + setting it. + version (str): + Optional. The Connect Gateway version to be + used in the resulting configuration. + + Leave this field blank to let the server choose + the version (recommended). + kubernetes_namespace (str): + Optional. The namespace to use in the kubeconfig context. + + If this field is specified, the server will set the + ``namespace`` field in kubeconfig context. If not specified, + the ``namespace`` field is omitted. + operating_system (google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest.OperatingSystem): + Optional. The operating system where the + kubeconfig will be used. + """ + + class OperatingSystem(proto.Enum): + r"""Operating systems requiring specialized kubeconfigs. + + Values: + OPERATING_SYSTEM_UNSPECIFIED (0): + Generates a kubeconfig that works for all + operating systems not defined below. + OPERATING_SYSTEM_WINDOWS (1): + Generates a kubeconfig that is specifically + designed to work with Windows. 
+ """ + OPERATING_SYSTEM_UNSPECIFIED = 0 + OPERATING_SYSTEM_WINDOWS = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force_use_agent: bool = proto.Field( + proto.BOOL, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + kubernetes_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + operating_system: OperatingSystem = proto.Field( + proto.ENUM, + number=5, + enum=OperatingSystem, + ) + + +class GenerateCredentialsResponse(proto.Message): + r"""Connection information for a particular membership. + + Attributes: + kubeconfig (bytes): + A full YAML kubeconfig in serialized format. + endpoint (str): + The generated URI of the cluster as accessed + through the Connect Gateway API. + """ + + kubeconfig: bytes = proto.Field( + proto.BYTES, + number=1, + ) + endpoint: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py index d16e8a461ad2..f58bb78cb5e7 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py @@ -18,9 +18,12 @@ __version__ = package_version.__version__ -from .services.gateway_service import GatewayServiceAsyncClient, GatewayServiceClient +from .services.gateway_control import GatewayControlAsyncClient, GatewayControlClient +from .types.control import GenerateCredentialsRequest, GenerateCredentialsResponse __all__ = ( - "GatewayServiceAsyncClient", - "GatewayServiceClient", + "GatewayControlAsyncClient", + "GatewayControlClient", + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json index 38a616fb276e..33fee488e86c 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json @@ -5,64 +5,24 @@ "protoPackage": "google.cloud.gkeconnect.gateway.v1beta1", "schema": "1.0", "services": { - "GatewayService": { + "GatewayControl": { "clients": { "grpc": { - "libraryClient": "GatewayServiceClient", + "libraryClient": "GatewayControlClient", "rpcs": { - "DeleteResource": { + "GenerateCredentials": { "methods": [ - "delete_resource" - ] - }, - "GetResource": { - "methods": [ - "get_resource" - ] - }, - "PatchResource": { - "methods": [ - "patch_resource" - ] - }, - "PostResource": { - "methods": [ - "post_resource" - ] - }, - "PutResource": { - "methods": [ - "put_resource" + "generate_credentials" ] } } }, "grpc-async": { - "libraryClient": "GatewayServiceAsyncClient", + "libraryClient": "GatewayControlAsyncClient", "rpcs": { - "DeleteResource": { - "methods": [ - "delete_resource" - ] - }, - "GetResource": { - "methods": [ - "get_resource" - ] - }, - "PatchResource": { - "methods": [ - "patch_resource" - ] - }, - "PostResource": { - "methods": [ - "post_resource" - ] - }, - "PutResource": { + "GenerateCredentials": { "methods": [ - "put_resource" + "generate_credentials" ] } } diff --git 
a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/__init__.py new file mode 100644 index 000000000000..6d98a5d3aec9 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import GatewayControlAsyncClient +from .client import GatewayControlClient + +__all__ = ( + "GatewayControlClient", + "GatewayControlAsyncClient", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/async_client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/async_client.py new file mode 100644 index 000000000000..a00f6b015f62 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/async_client.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
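# A brief, hypothetical sketch of the ``from_service_account_file`` helper defined on
# the v1beta1 clients that follow; the key file path is a placeholder.
from google.cloud.gkeconnect import gateway_v1beta1

client = gateway_v1beta1.GatewayControlClient.from_service_account_file(
    "service-account.json"
)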
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1.types import control + +from .client import GatewayControlClient +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport + + +class GatewayControlAsyncClient: + """GatewayControl is the control plane API for Connect Gateway.""" + + _client: GatewayControlClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = GatewayControlClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = GatewayControlClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod( + GatewayControlClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GatewayControlClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GatewayControlClient.common_folder_path) + parse_common_folder_path = staticmethod( + GatewayControlClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GatewayControlClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GatewayControlClient.parse_common_organization_path + ) + common_project_path = staticmethod(GatewayControlClient.common_project_path) + parse_common_project_path = staticmethod( + GatewayControlClient.parse_common_project_path + ) + common_location_path = staticmethod(GatewayControlClient.common_location_path) + parse_common_location_path = staticmethod( + GatewayControlClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlAsyncClient: The constructed client. + """ + return GatewayControlClient.from_service_account_info.__func__(GatewayControlAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + GatewayControlAsyncClient: The constructed client. + """ + return GatewayControlClient.from_service_account_file.__func__(GatewayControlAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return GatewayControlClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = GatewayControlClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GatewayControlClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1beta1 + + async def sample_generate_credentials(): + # Create a client + client = gateway_v1beta1.GatewayControlAsyncClient() + + # Initialize request argument(s) + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest, dict]]): + The request object. 
A request for connection information + for a particular membership. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_credentials + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "GatewayControlAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlAsyncClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py new file mode 100644 index 000000000000..be9bc2efdcf0 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py @@ -0,0 +1,758 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
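# A hypothetical end-to-end sketch of the async client defined above, using the
# ``__aenter__``/``__aexit__`` pair so the transport is closed automatically.
# ``name_value`` is the placeholder from the generated sample; a real call needs a
# Fleet membership resource name and credentials, and the ``endpoint`` field is
# assumed to mirror the v1 response message shown earlier in this change.
import asyncio

from google.cloud.gkeconnect import gateway_v1beta1


async def main():
    async with gateway_v1beta1.GatewayControlAsyncClient() as client:
        request = gateway_v1beta1.GenerateCredentialsRequest(name="name_value")
        response = await client.generate_credentials(request=request)
        print(response.endpoint)


asyncio.run(main())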
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1.types import control + +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc import GatewayControlGrpcTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport + + +class GatewayControlClientMeta(type): + """Metaclass for the GatewayControl client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GatewayControlTransport]] + _transport_registry["grpc"] = GatewayControlGrpcTransport + _transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GatewayControlTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GatewayControlClient(metaclass=GatewayControlClientMeta): + """GatewayControl is the control plane API for Connect Gateway.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "connectgateway.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "connectgateway.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location 
string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
+ + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = GatewayControlClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = GatewayControlClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or GatewayControlClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = GatewayControlClient._read_environment_variables() + self._client_cert_source = GatewayControlClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = GatewayControlClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, GatewayControlTransport) + if transport_provided: + # transport is a GatewayControlTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(GatewayControlTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or GatewayControlClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[GatewayControlTransport], Callable[..., GatewayControlTransport] + ] = ( + GatewayControlClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GatewayControlTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1beta1 + + def sample_generate_credentials(): + # Create a client + client = gateway_v1beta1.GatewayControlClient() + + # Initialize request argument(s) + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest, dict]): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_credentials] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GatewayControlClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/__init__.py similarity index 66% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/__init__.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/__init__.py index d04e25a4ef88..3692124a1a40 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/__init__.py @@ -16,17 +16,17 @@ from collections import OrderedDict from typing import Dict, Type -from .base import GatewayServiceTransport -from .grpc import GatewayServiceGrpcTransport -from .grpc_asyncio import GatewayServiceGrpcAsyncIOTransport +from .base import GatewayControlTransport +from .grpc import GatewayControlGrpcTransport +from .grpc_asyncio import GatewayControlGrpcAsyncIOTransport # Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[GatewayServiceTransport]] -_transport_registry["grpc"] = GatewayServiceGrpcTransport -_transport_registry["grpc_asyncio"] = GatewayServiceGrpcAsyncIOTransport +_transport_registry = OrderedDict() # type: Dict[str, Type[GatewayControlTransport]] +_transport_registry["grpc"] = GatewayControlGrpcTransport +_transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport __all__ = ( - "GatewayServiceTransport", - "GatewayServiceGrpcTransport", - "GatewayServiceGrpcAsyncIOTransport", + "GatewayControlTransport", + "GatewayControlGrpcTransport", + "GatewayControlGrpcAsyncIOTransport", ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/base.py similarity index 73% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/base.py index 232b9f043b8c..5640d2d30b08 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/base.py @@ -16,7 +16,6 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.api import httpbody_pb2 # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -26,14 +25,15 @@ from google.oauth2 import service_account # type: ignore from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version +from google.cloud.gkeconnect.gateway_v1beta1.types import control DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class GatewayServiceTransport(abc.ABC): - """Abstract transport class for GatewayService.""" +class GatewayControlTransport(abc.ABC): + """Abstract transport class for GatewayControl.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) @@ -128,29 +128,18 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { - self.get_resource: gapic_v1.method.wrap_method( - self.get_resource, - default_timeout=None, - client_info=client_info, - ), - self.post_resource: gapic_v1.method.wrap_method( - self.post_resource, - default_timeout=None, - client_info=client_info, - ), - self.delete_resource: gapic_v1.method.wrap_method( - self.delete_resource, - default_timeout=None, - client_info=client_info, - ), - self.put_resource: gapic_v1.method.wrap_method( - self.put_resource, - default_timeout=None, - client_info=client_info, - ), - self.patch_resource: gapic_v1.method.wrap_method( - self.patch_resource, - default_timeout=None, + self.generate_credentials: gapic_v1.method.wrap_method( + self.generate_credentials, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), } @@ -165,47 +154,14 @@ def close(self): raise NotImplementedError() @property - def get_resource( - self, - ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], - ]: - raise NotImplementedError() - - @property - def post_resource( - self, - ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], - ]: - raise NotImplementedError() - - @property - def delete_resource( - self, - ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], - ]: - raise NotImplementedError() - - @property - def put_resource( - self, - ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], - ]: - raise NotImplementedError() - - @property - def patch_resource( + def generate_credentials( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [control.GenerateCredentialsRequest], + Union[ + control.GenerateCredentialsResponse, + Awaitable[control.GenerateCredentialsResponse], + ], ]: raise NotImplementedError() @@ -214,4 +170,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ("GatewayServiceTransport",) +__all__ = ("GatewayControlTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc.py similarity index 66% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc.py index 69b428b95167..c61f1c5d018d 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc.py @@ -16,25 +16,21 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api import httpbody_pb2 # type: ignore from google.api_core import gapic_v1, grpc_helpers import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: 
ignore -from .base import DEFAULT_CLIENT_INFO, GatewayServiceTransport +from google.cloud.gkeconnect.gateway_v1beta1.types import control +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport -class GatewayServiceGrpcTransport(GatewayServiceTransport): - """gRPC backend transport for GatewayService. - Gateway service is a public API which works as a Kubernetes - resource model proxy between end users and registered Kubernetes - clusters. Each RPC in this service matches with an HTTP verb. - End user will initiate kubectl commands against the Gateway - service, and Gateway service will forward user requests to - clusters. +class GatewayControlGrpcTransport(GatewayControlTransport): + """gRPC backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -239,119 +235,20 @@ def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property - def get_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the get resource method over gRPC. - - GetResource performs an HTTP GET request on the - Kubernetes API Server. - - Returns: - Callable[[~.HttpBody], - ~.HttpBody]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_resource" not in self._stubs: - self._stubs["get_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/GetResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["get_resource"] - - @property - def post_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the post resource method over gRPC. - - PostResource performs an HTTP POST on the Kubernetes - API Server. - - Returns: - Callable[[~.HttpBody], - ~.HttpBody]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "post_resource" not in self._stubs: - self._stubs["post_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PostResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["post_resource"] - - @property - def delete_resource( - self, - ) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the delete resource method over gRPC. - - DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. - - Returns: - Callable[[~.HttpBody], - ~.HttpBody]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_resource" not in self._stubs: - self._stubs["delete_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/DeleteResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["delete_resource"] - - @property - def put_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the put resource method over gRPC. - - PutResource performs an HTTP PUT on the Kubernetes - API Server. - - Returns: - Callable[[~.HttpBody], - ~.HttpBody]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "put_resource" not in self._stubs: - self._stubs["put_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PutResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["put_resource"] - - @property - def patch_resource( + def generate_credentials( self, - ) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the patch resource method over gRPC. + ) -> Callable[ + [control.GenerateCredentialsRequest], control.GenerateCredentialsResponse + ]: + r"""Return a callable for the generate credentials method over gRPC. - PatchResource performs an HTTP PATCH on the - Kubernetes API Server. + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. Returns: - Callable[[~.HttpBody], - ~.HttpBody]: + Callable[[~.GenerateCredentialsRequest], + ~.GenerateCredentialsResponse]: A function that, when called, will call the underlying RPC on the server. """ @@ -359,13 +256,13 @@ def patch_resource( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "patch_resource" not in self._stubs: - self._stubs["patch_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PatchResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1beta1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, ) - return self._stubs["patch_resource"] + return self._stubs["generate_credentials"] def close(self): self.grpc_channel.close() @@ -375,4 +272,4 @@ def kind(self) -> str: return "grpc" -__all__ = ("GatewayServiceGrpcTransport",) +__all__ = ("GatewayControlGrpcTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc_asyncio.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc_asyncio.py similarity index 63% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc_asyncio.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc_asyncio.py index b4f9b940f08d..f51400e32386 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc_asyncio.py @@ -16,7 +16,6 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api import httpbody_pb2 # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries @@ -25,19 +24,16 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from .base import DEFAULT_CLIENT_INFO, GatewayServiceTransport -from .grpc import GatewayServiceGrpcTransport +from google.cloud.gkeconnect.gateway_v1beta1.types import control +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .grpc import GatewayControlGrpcTransport -class GatewayServiceGrpcAsyncIOTransport(GatewayServiceTransport): - """gRPC AsyncIO backend transport for GatewayService. - Gateway service is a public API which works as a Kubernetes - resource model proxy between end users and registered Kubernetes - clusters. Each RPC in this service matches with an HTTP verb. - End user will initiate kubectl commands against the Gateway - service, and Gateway service will forward user requests to - clusters. +class GatewayControlGrpcAsyncIOTransport(GatewayControlTransport): + """gRPC AsyncIO backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -243,17 +239,21 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def get_resource( + def generate_credentials( self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the get resource method over gRPC. + ) -> Callable[ + [control.GenerateCredentialsRequest], + Awaitable[control.GenerateCredentialsResponse], + ]: + r"""Return a callable for the generate credentials method over gRPC. - GetResource performs an HTTP GET request on the - Kubernetes API Server. + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: + Callable[[~.GenerateCredentialsRequest], + Awaitable[~.GenerateCredentialsResponse]]: A function that, when called, will call the underlying RPC on the server. """ @@ -261,148 +261,29 @@ def get_resource( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_resource" not in self._stubs: - self._stubs["get_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/GetResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1beta1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, ) - return self._stubs["get_resource"] - - @property - def post_resource( - self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the post resource method over gRPC. - - PostResource performs an HTTP POST on the Kubernetes - API Server. - - Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "post_resource" not in self._stubs: - self._stubs["post_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PostResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["post_resource"] - - @property - def delete_resource( - self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the delete resource method over gRPC. - - DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. - - Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_resource" not in self._stubs: - self._stubs["delete_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/DeleteResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["delete_resource"] - - @property - def put_resource( - self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the put resource method over gRPC. - - PutResource performs an HTTP PUT on the Kubernetes - API Server. - - Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "put_resource" not in self._stubs: - self._stubs["put_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PutResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["put_resource"] - - @property - def patch_resource( - self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the patch resource method over gRPC. - - PatchResource performs an HTTP PATCH on the - Kubernetes API Server. - - Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "patch_resource" not in self._stubs: - self._stubs["patch_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PatchResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, - ) - return self._stubs["patch_resource"] + return self._stubs["generate_credentials"] def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.get_resource: gapic_v1.method_async.wrap_method( - self.get_resource, - default_timeout=None, - client_info=client_info, - ), - self.post_resource: gapic_v1.method_async.wrap_method( - self.post_resource, - default_timeout=None, - client_info=client_info, - ), - self.delete_resource: gapic_v1.method_async.wrap_method( - self.delete_resource, - default_timeout=None, - client_info=client_info, - ), - self.put_resource: gapic_v1.method_async.wrap_method( - self.put_resource, - default_timeout=None, - client_info=client_info, - ), - self.patch_resource: gapic_v1.method_async.wrap_method( - self.patch_resource, - default_timeout=None, + self.generate_credentials: gapic_v1.method_async.wrap_method( + self.generate_credentials, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), } @@ -411,4 +292,4 @@ def close(self): return self.grpc_channel.close() -__all__ = ("GatewayServiceGrpcAsyncIOTransport",) +__all__ = ("GatewayControlGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py deleted file mode 100644 index 067ac7ab5cc4..000000000000 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py +++ /dev/null @@ -1,1133 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import re -from typing import ( - Callable, - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, -) - -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.api_core.client_options import ClientOptions -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import any_pb2 # type: ignore - -from .client import GatewayServiceClient -from .transports.base import DEFAULT_CLIENT_INFO, GatewayServiceTransport -from .transports.grpc_asyncio import GatewayServiceGrpcAsyncIOTransport - - -class GatewayServiceAsyncClient: - """Gateway service is a public API which works as a Kubernetes - resource model proxy between end users and registered Kubernetes - clusters. Each RPC in this service matches with an HTTP verb. - End user will initiate kubectl commands against the Gateway - service, and Gateway service will forward user requests to - clusters. - """ - - _client: GatewayServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = GatewayServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = GatewayServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = GatewayServiceClient._DEFAULT_UNIVERSE - - common_billing_account_path = staticmethod( - GatewayServiceClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - GatewayServiceClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(GatewayServiceClient.common_folder_path) - parse_common_folder_path = staticmethod( - GatewayServiceClient.parse_common_folder_path - ) - common_organization_path = staticmethod( - GatewayServiceClient.common_organization_path - ) - parse_common_organization_path = staticmethod( - GatewayServiceClient.parse_common_organization_path - ) - common_project_path = staticmethod(GatewayServiceClient.common_project_path) - parse_common_project_path = staticmethod( - GatewayServiceClient.parse_common_project_path - ) - common_location_path = staticmethod(GatewayServiceClient.common_location_path) - parse_common_location_path = staticmethod( - GatewayServiceClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GatewayServiceAsyncClient: The constructed client. 
- """ - return GatewayServiceClient.from_service_account_info.__func__(GatewayServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GatewayServiceAsyncClient: The constructed client. - """ - return GatewayServiceClient.from_service_account_file.__func__(GatewayServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return GatewayServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> GatewayServiceTransport: - """Returns the transport used by the client instance. - - Returns: - GatewayServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = GatewayServiceClient.get_transport_class - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, GatewayServiceTransport, Callable[..., GatewayServiceTransport]] - ] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the gateway service async client. 
- - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,GatewayServiceTransport,Callable[..., GatewayServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the GatewayServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = GatewayServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - async def get_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""GetResource performs an HTTP GET request on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_get_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.get_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. 
- - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def post_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PostResource performs an HTTP POST on the Kubernetes - API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_post_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.post_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.post_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_delete_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.delete_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. 
It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def put_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PutResource performs an HTTP PUT on the Kubernetes - API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.put_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.put_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def patch_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PatchResource performs an HTTP PATCH on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_patch_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.patch_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.patch_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "GatewayServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("GatewayServiceAsyncClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py deleted file mode 100644 index bc179d1d0508..000000000000 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py +++ /dev/null @@ -1,1536 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import ( - Callable, - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) -import warnings - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import any_pb2 # type: ignore - -from .transports.base import DEFAULT_CLIENT_INFO, GatewayServiceTransport -from .transports.grpc import GatewayServiceGrpcTransport -from .transports.grpc_asyncio import GatewayServiceGrpcAsyncIOTransport - - -class GatewayServiceClientMeta(type): - """Metaclass for the GatewayService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = ( - OrderedDict() - ) # type: Dict[str, Type[GatewayServiceTransport]] - _transport_registry["grpc"] = GatewayServiceGrpcTransport - _transport_registry["grpc_asyncio"] = GatewayServiceGrpcAsyncIOTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[GatewayServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. 
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class GatewayServiceClient(metaclass=GatewayServiceClientMeta):
-    """Gateway service is a public API which works as a Kubernetes
-    resource model proxy between end users and registered Kubernetes
-    clusters. Each RPC in this service matches with an HTTP verb.
-    End user will initiate kubectl commands against the Gateway
-    service, and Gateway service will forward user requests to
-    clusters.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "connectgateway.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "connectgateway.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            GatewayServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            GatewayServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> GatewayServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            GatewayServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn( - "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning, - ) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. 
- """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint( - api_override, client_cert_source, universe_domain, use_mtls_endpoint - ): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - _default_universe = GatewayServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError( - f"mTLS is not supported in any universe other than {_default_universe}." - ) - api_endpoint = GatewayServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain - ) - return api_endpoint - - @staticmethod - def _get_universe_domain( - client_universe_domain: Optional[str], universe_domain_env: Optional[str] - ) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = GatewayServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = GatewayServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GatewayServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, GatewayServiceTransport, Callable[..., GatewayServiceTransport]] - ] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the gateway service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,GatewayServiceTransport,Callable[..., GatewayServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the GatewayServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast( - client_options_lib.ClientOptions, self._client_options - ) - - universe_domain_opt = getattr(self._client_options, "universe_domain", None) - - ( - self._use_client_cert, - self._use_mtls_endpoint, - self._universe_domain_env, - ) = GatewayServiceClient._read_environment_variables() - self._client_cert_source = GatewayServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert - ) - self._universe_domain = GatewayServiceClient._get_universe_domain( - universe_domain_opt, self._universe_domain_env - ) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, GatewayServiceTransport) - if transport_provided: - # transport is a GatewayServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(GatewayServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = ( - self._api_endpoint - or GatewayServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint, - ) - ) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - transport_init: Union[ - Type[GatewayServiceTransport], Callable[..., GatewayServiceTransport] - ] = ( - GatewayServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., GatewayServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def get_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""GetResource performs an HTTP GET request on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_get_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.get_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_resource] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def post_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PostResource performs an HTTP POST on the Kubernetes - API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_post_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.post_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. 
- - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.post_resource] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_delete_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.delete_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_resource] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def put_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PutResource performs an HTTP PUT on the Kubernetes - API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.put_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. 
- - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.put_resource] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def patch_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PatchResource performs an HTTP PATCH on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_patch_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.patch_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.patch_resource] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "GatewayServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("GatewayServiceClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py index 77c6c7a35ebe..2bfb31823e04 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py @@ -13,5 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .control import GenerateCredentialsRequest, GenerateCredentialsResponse -__all__ = () +__all__ = ( + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/control.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/control.py new file mode 100644 index 000000000000..4afb5fd530f1 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/control.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.gkeconnect.gateway.v1beta1", + manifest={ + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", + }, +) + + +class GenerateCredentialsRequest(proto.Message): + r"""A request for connection information for a particular + membership. + + Attributes: + name (str): + Required. The Fleet membership resource. + force_use_agent (bool): + Optional. Whether to force the use of Connect + Agent-based transport. + This will return a configuration that uses + Connect Agent as the underlying transport + mechanism for cluster types that would otherwise + have used a different transport. Requires that + Connect Agent be installed on the cluster. + Setting this field to false is equivalent to not + setting it. + version (str): + Optional. The Connect Gateway version to be + used in the resulting configuration. + + Leave this field blank to let the server choose + the version (recommended). + kubernetes_namespace (str): + Optional. The namespace to use in the kubeconfig context. + + If this field is specified, the server will set the + ``namespace`` field in kubeconfig context. If not specified, + the ``namespace`` field is omitted. + operating_system (google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest.OperatingSystem): + Optional. The operating system where the + kubeconfig will be used. + """ + + class OperatingSystem(proto.Enum): + r"""Operating systems requiring specialized kubeconfigs. + + Values: + OPERATING_SYSTEM_UNSPECIFIED (0): + Generates a kubeconfig that works for all + operating systems not defined below. + OPERATING_SYSTEM_WINDOWS (1): + Generates a kubeconfig that is specifically + designed to work with Windows. + """ + OPERATING_SYSTEM_UNSPECIFIED = 0 + OPERATING_SYSTEM_WINDOWS = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force_use_agent: bool = proto.Field( + proto.BOOL, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + kubernetes_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + operating_system: OperatingSystem = proto.Field( + proto.ENUM, + number=5, + enum=OperatingSystem, + ) + + +class GenerateCredentialsResponse(proto.Message): + r"""Connection information for a particular membership. + + Attributes: + kubeconfig (bytes): + A full YAML kubeconfig in serialized format. + endpoint (str): + The generated URI of the cluster as accessed + through the Connect Gateway API. 
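Because ``kubeconfig`` is returned as serialized YAML bytes, a typical caller writes it to a file and points ``kubectl`` or a Kubernetes client at it, while ``endpoint`` gives the Connect Gateway URI for the cluster. A minimal sketch follows, mirroring the generated samples; the membership resource name format and output path are assumptions for illustration.

.. code-block:: python

    from google.cloud.gkeconnect import gateway_v1beta1

    def write_gateway_kubeconfig(
        membership: str, path: str = "gateway-kubeconfig.yaml"
    ) -> str:
        # Request connection credentials for the given Fleet membership.
        client = gateway_v1beta1.GatewayControlClient()
        request = gateway_v1beta1.GenerateCredentialsRequest(name=membership)
        response = client.generate_credentials(request=request)

        # kubeconfig is already a serialized YAML document; write it out as-is.
        with open(path, "wb") as f:
            f.write(response.kubeconfig)

        # The generated Connect Gateway URI for the cluster.
        return response.endpoint

    # Assumed membership name format, for illustration only:
    # write_gateway_kubeconfig(
    #     "projects/PROJECT/locations/global/memberships/MEMBERSHIP"
    # )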
+ """ + + kubeconfig: bytes = proto.Field( + proto.BYTES, + number=1, + ) + endpoint: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_async.py similarity index 75% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_async.py index 2351568e5723..ead6fc1342dd 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_async.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for PatchResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_PatchResource_async] +# [START connectgateway_v1_generated_GatewayControl_GenerateCredentials_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 +from google.cloud.gkeconnect import gateway_v1 -async def sample_patch_resource(): +async def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() + client = gateway_v1.GatewayControlAsyncClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = await client.patch_resource(request=request) + response = await client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_PatchResource_async] +# [END connectgateway_v1_generated_GatewayControl_GenerateCredentials_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_sync.py similarity index 75% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_sync.py index e88cf56e0b48..fcb75ad701c7 100644 --- 
a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for PatchResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_PatchResource_sync] +# [START connectgateway_v1_generated_GatewayControl_GenerateCredentials_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 +from google.cloud.gkeconnect import gateway_v1 -def sample_patch_resource(): +def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = gateway_v1.GatewayControlClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = client.patch_resource(request=request) + response = client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_PatchResource_sync] +# [END connectgateway_v1_generated_GatewayControl_GenerateCredentials_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py similarity index 76% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py index fb088f6698bd..61202c081a3f 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for DeleteResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_DeleteResource_async] +# [START connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore from google.cloud.gkeconnect import gateway_v1beta1 -async def sample_delete_resource(): +async def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() + client = gateway_v1beta1.GatewayControlAsyncClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = await client.delete_resource(request=request) + response = await client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_DeleteResource_async] +# [END connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py similarity index 77% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py index c83488e8f680..87603404a7a2 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_GetResource_sync] +# [START connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore from google.cloud.gkeconnect import gateway_v1beta1 -def sample_get_resource(): +def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = gateway_v1beta1.GatewayControlClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = client.get_resource(request=request) + response = client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_GetResource_sync] +# [END connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py deleted file mode 100644 index 3177e20c062a..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_DeleteResource_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -def sample_delete_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.delete_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_DeleteResource_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_async.py deleted file mode 100644 index b11e6fe6d708..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_GetResource_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -async def sample_get_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.get_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_GetResource_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_async.py deleted file mode 100644 index bda10b9a013a..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PostResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PostResource_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -async def sample_post_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.post_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PostResource_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py deleted file mode 100644 index 2a5747eb1c12..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PostResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PostResource_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -def sample_post_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.post_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PostResource_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_async.py deleted file mode 100644 index 2dd639963534..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PutResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PutResource_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -async def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.put_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PutResource_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py deleted file mode 100644 index e549ff995cd5..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PutResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PutResource_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.put_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PutResource_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json new file mode 100644 index 000000000000..0b1343a8f6ad --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json @@ -0,0 +1,168 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.gkeconnect.gateway.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-gke-connect-gateway", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gkeconnect.gateway_v1.GatewayControlAsyncClient", + "shortName": "GatewayControlAsyncClient" + }, + "fullName": "google.cloud.gkeconnect.gateway_v1.GatewayControlAsyncClient.generate_credentials", + "method": { + "fullName": "google.cloud.gkeconnect.gateway.v1.GatewayControl.GenerateCredentials", + "service": { + "fullName": "google.cloud.gkeconnect.gateway.v1.GatewayControl", + "shortName": "GatewayControl" + }, + "shortName": "GenerateCredentials" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" + }, + "description": "Sample for GenerateCredentials", + "file": "connectgateway_v1_generated_gateway_control_generate_credentials_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "connectgateway_v1_generated_GatewayControl_GenerateCredentials_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "connectgateway_v1_generated_gateway_control_generate_credentials_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gkeconnect.gateway_v1.GatewayControlClient", + "shortName": "GatewayControlClient" + }, + "fullName": "google.cloud.gkeconnect.gateway_v1.GatewayControlClient.generate_credentials", + "method": { + "fullName": "google.cloud.gkeconnect.gateway.v1.GatewayControl.GenerateCredentials", + "service": { + "fullName": 
"google.cloud.gkeconnect.gateway.v1.GatewayControl", + "shortName": "GatewayControl" + }, + "shortName": "GenerateCredentials" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" + }, + "description": "Sample for GenerateCredentials", + "file": "connectgateway_v1_generated_gateway_control_generate_credentials_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "connectgateway_v1_generated_GatewayControl_GenerateCredentials_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "connectgateway_v1_generated_gateway_control_generate_credentials_sync.py" + } + ] +} diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json index 6586a7512b44..f57662ce8498 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json @@ -16,22 +16,22 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlAsyncClient", + "shortName": "GatewayControlAsyncClient" }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.delete_resource", + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlAsyncClient.generate_credentials", "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.DeleteResource", + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl.GenerateCredentials", "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl", + "shortName": "GatewayControl" }, - "shortName": "DeleteResource" + "shortName": "GenerateCredentials" }, "parameters": [ { "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" + "type": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest" }, { "name": "retry", @@ -46,14 +46,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "delete_resource" + "resultType": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" }, - "description": "Sample for DeleteResource", - "file": 
"connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py", + "description": "Sample for GenerateCredentials", + "file": "connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_DeleteResource_async", + "regionTag": "connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_async", "segments": [ { "end": 51, @@ -66,13 +66,13 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { "end": 45, - "start": 42, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { @@ -86,28 +86,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py" + "title": "connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlClient", + "shortName": "GatewayControlClient" }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.delete_resource", + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlClient.generate_credentials", "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.DeleteResource", + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl.GenerateCredentials", "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl", + "shortName": "GatewayControl" }, - "shortName": "DeleteResource" + "shortName": "GenerateCredentials" }, "parameters": [ { "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" + "type": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest" }, { "name": "retry", @@ -122,14 +122,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "delete_resource" + "resultType": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" }, - "description": "Sample for DeleteResource", - "file": "connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py", + "description": "Sample for GenerateCredentials", + "file": "connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_DeleteResource_sync", + "regionTag": "connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_sync", "segments": [ { "end": 51, @@ -142,13 +142,13 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { "end": 45, - "start": 42, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { @@ -162,619 +162,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.get_resource", - 
"method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.GetResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "GetResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "get_resource" - }, - "description": "Sample for GetResource", - "file": "connectgateway_v1beta1_generated_gateway_service_get_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_GetResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_get_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.get_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.GetResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "GetResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "get_resource" - }, - "description": "Sample for GetResource", - "file": "connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_GetResource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.patch_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PatchResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": 
"GatewayService" - }, - "shortName": "PatchResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "patch_resource" - }, - "description": "Sample for PatchResource", - "file": "connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PatchResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.patch_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PatchResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PatchResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "patch_resource" - }, - "description": "Sample for PatchResource", - "file": "connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PatchResource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.post_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PostResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PostResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "post_resource" - }, - "description": "Sample for PostResource", - "file": "connectgateway_v1beta1_generated_gateway_service_post_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PostResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_post_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.post_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PostResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PostResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "post_resource" - }, - "description": "Sample for PostResource", - "file": "connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PostResource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.put_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PutResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PutResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.api.httpbody_pb2.HttpBody", - "shortName": "put_resource" - }, - "description": "Sample for PutResource", - "file": "connectgateway_v1beta1_generated_gateway_service_put_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PutResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_put_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.put_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PutResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PutResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "put_resource" - }, - "description": "Sample for PutResource", - "file": "connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PutResource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py" + "title": "connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py" } ] } diff --git a/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1_keywords.py b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1_keywords.py new file mode 100644 index 000000000000..10884865fe8a --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class gatewayCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'generate_credentials': ('name', 'force_use_agent', 'version', 'kubernetes_namespace', 'operating_system', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=gatewayCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the gateway client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. 
+ B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py index 56dd2027e348..10884865fe8a 100644 --- a/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py +++ b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py @@ -39,11 +39,7 @@ def partition( class gatewayCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'delete_resource': ('content_type', 'data', 'extensions', ), - 'get_resource': ('content_type', 'data', 'extensions', ), - 'patch_resource': ('content_type', 'data', 'extensions', ), - 'post_resource': ('content_type', 'data', 'extensions', ), - 'put_resource': ('content_type', 'data', 'extensions', ), + 'generate_credentials': ('name', 'force_use_agent', 'version', 'kubernetes_namespace', 'operating_system', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/__init__.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
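For illustration, a minimal sketch of the request-dict calling convention that the fixup scripts above produce for the regenerated GatewayControl v1 client. It mirrors the transport-mocking pattern used in the unit tests below; the membership name is the same placeholder those tests use, and it assumes the google-cloud-gke-connect-gateway package with its gateway_v1 module is installed:

# Sketch: call the v1 GatewayControl client with a request dict, the form that
# fixup_gateway_v1_keywords.py emits. The transport stub is mocked, as in the
# unit tests below, so no RPC is actually made.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.gkeconnect.gateway_v1.services.gateway_control import GatewayControlClient
from google.cloud.gkeconnect.gateway_v1.types import control

client = GatewayControlClient(credentials=ga_credentials.AnonymousCredentials())

with mock.patch.object(type(client.transport.generate_credentials), "__call__") as call:
    # Fake the server response; only the mocked transport is exercised.
    call.return_value = control.GenerateCredentialsResponse(kubeconfig=b"kubeconfig_blob")
    response = client.generate_credentials(
        request={"name": "projects/sample1/locations/sample2/memberships/sample3"}
    )

assert isinstance(response, control.GenerateCredentialsResponse)

Both keyword maps now list only generate_credentials, which is consistent with the removal of the raw get/put/patch/post/delete resource snippets earlier in this patch.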
+# diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py similarity index 56% rename from packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py rename to packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py index 97430148490d..67c78f2a83b7 100644 --- a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py @@ -22,9 +22,10 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math -from google.api import httpbody_pb2 # type: ignore from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template from google.api_core import api_core_version, client_options from google.api_core import exceptions as core_exceptions @@ -33,18 +34,21 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore +from google.protobuf import json_format import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session -from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service import ( - GatewayServiceAsyncClient, - GatewayServiceClient, +from google.cloud.gkeconnect.gateway_v1.services.gateway_control import ( + GatewayControlAsyncClient, + GatewayControlClient, transports, ) +from google.cloud.gkeconnect.gateway_v1.types import control def client_cert_source_callback(): @@ -80,40 +84,40 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert GatewayServiceClient._get_default_mtls_endpoint(None) is None + assert GatewayControlClient._get_default_mtls_endpoint(None) is None assert ( - GatewayServiceClient._get_default_mtls_endpoint(api_endpoint) + GatewayControlClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - GatewayServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + GatewayControlClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - GatewayServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + GatewayControlClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - GatewayServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + GatewayControlClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( - GatewayServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + GatewayControlClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) def test__read_environment_variables(): - assert GatewayServiceClient._read_environment_variables() == (False, "auto", None) + assert GatewayControlClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() 
== ( True, "auto", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "auto", None, @@ -123,28 +127,28 @@ def test__read_environment_variables(): os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError) as excinfo: - GatewayServiceClient._read_environment_variables() + GatewayControlClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "auto", None, @@ -152,14 +156,14 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - GatewayServiceClient._read_environment_variables() + GatewayControlClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -170,13 +174,13 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() - assert GatewayServiceClient._get_client_cert_source(None, False) is None + assert GatewayControlClient._get_client_cert_source(None, False) is None assert ( - GatewayServiceClient._get_client_cert_source(mock_provided_cert_source, False) + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, False) is None ) assert ( - GatewayServiceClient._get_client_cert_source(mock_provided_cert_source, True) + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source ) @@ -188,11 +192,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - GatewayServiceClient._get_client_cert_source(None, True) + GatewayControlClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - GatewayServiceClient._get_client_cert_source( + GatewayControlClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -200,64 +204,64 @@ def test__get_client_cert_source(): @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - 
modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() - default_universe = GatewayServiceClient._DEFAULT_UNIVERSE - default_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == GatewayServiceClient.DEFAULT_MTLS_ENDPOINT + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT ) assert ( - GatewayServiceClient._get_api_endpoint(None, None, default_universe, "auto") + GatewayControlClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint ) assert ( - GatewayServiceClient._get_api_endpoint(None, None, default_universe, "always") - == GatewayServiceClient.DEFAULT_MTLS_ENDPOINT + GatewayControlClient._get_api_endpoint(None, None, default_universe, "always") + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT ) assert ( - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == GatewayServiceClient.DEFAULT_MTLS_ENDPOINT + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT ) assert ( - GatewayServiceClient._get_api_endpoint(None, None, mock_universe, "never") + GatewayControlClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint ) assert ( - GatewayServiceClient._get_api_endpoint(None, None, default_universe, "never") + GatewayControlClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -271,29 +275,30 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - GatewayServiceClient._get_universe_domain( + GatewayControlClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - GatewayServiceClient._get_universe_domain(None, universe_domain_env) + GatewayControlClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - GatewayServiceClient._get_universe_domain(None, None) - == GatewayServiceClient._DEFAULT_UNIVERSE + GatewayControlClient._get_universe_domain(None, None) + == GatewayControlClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - GatewayServiceClient._get_universe_domain("", None) + GatewayControlClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
@pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest"), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -372,11 +377,12 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (GatewayServiceClient, "grpc"), - (GatewayServiceAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "rest"), ], ) -def test_gateway_service_client_from_service_account_info(client_class, transport_name): +def test_gateway_control_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" @@ -387,17 +393,22 @@ def test_gateway_service_client_from_service_account_info(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("connectgateway.googleapis.com:443") + assert client.transport._host == ( + "connectgateway.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com/" + ) @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.GatewayServiceGrpcTransport, "grpc"), - (transports.GatewayServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GatewayControlGrpcTransport, "grpc"), + (transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GatewayControlRestTransport, "rest"), ], ) -def test_gateway_service_client_service_account_always_use_jwt( +def test_gateway_control_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -418,11 +429,12 @@ def test_gateway_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (GatewayServiceClient, "grpc"), - (GatewayServiceAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "rest"), ], ) -def test_gateway_service_client_from_service_account_file(client_class, transport_name): +def test_gateway_control_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" @@ -440,52 +452,58 @@ def test_gateway_service_client_from_service_account_file(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("connectgateway.googleapis.com:443") + assert client.transport._host == ( + "connectgateway.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com/" + ) -def test_gateway_service_client_get_transport_class(): - transport = GatewayServiceClient.get_transport_class() +def test_gateway_control_client_get_transport_class(): + transport = GatewayControlClient.get_transport_class() available_transports = [ - transports.GatewayServiceGrpcTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlRestTransport, ] assert transport in 
available_transports - transport = GatewayServiceClient.get_transport_class("grpc") - assert transport == transports.GatewayServiceGrpcTransport + transport = GatewayControlClient.get_transport_class("grpc") + assert transport == transports.GatewayControlGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest"), ], ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) -def test_gateway_service_client_client_options( +def test_gateway_control_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. - with mock.patch.object(GatewayServiceClient, "get_transport_class") as gtc: + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. 
- with mock.patch.object(GatewayServiceClient, "get_transport_class") as gtc: + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -608,34 +626,36 @@ def test_gateway_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc", "true"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc", "true"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", "true", ), - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc", "false"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc", "false"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", "false", ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest", "true"), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest", "false"), ], ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_gateway_service_client_mtls_env_auto( +def test_gateway_control_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -738,19 +758,19 @@ def test_gateway_service_client_mtls_env_auto( @pytest.mark.parametrize( - "client_class", [GatewayServiceClient, GatewayServiceAsyncClient] + "client_class", [GatewayControlClient, GatewayControlAsyncClient] ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(GatewayServiceClient), + modify_default_endpoint(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(GatewayServiceAsyncClient), + modify_default_endpoint(GatewayControlAsyncClient), ) -def test_gateway_service_client_get_mtls_endpoint_and_cert_source(client_class): +def test_gateway_control_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
@@ -842,27 +862,27 @@ def test_gateway_service_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize( - "client_class", [GatewayServiceClient, GatewayServiceAsyncClient] + "client_class", [GatewayControlClient, GatewayControlAsyncClient] ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) -def test_gateway_service_client_client_api_endpoint(client_class): +def test_gateway_control_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = GatewayServiceClient._DEFAULT_UNIVERSE - default_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) @@ -930,15 +950,16 @@ def test_gateway_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest"), ], ) -def test_gateway_service_client_client_options_scopes( +def test_gateway_control_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. @@ -967,20 +988,21 @@ def test_gateway_service_client_client_options_scopes( "client_class,transport_class,transport_name,grpc_helpers", [ ( - GatewayServiceClient, - transports.GatewayServiceGrpcTransport, + GatewayControlClient, + transports.GatewayControlGrpcTransport, "grpc", grpc_helpers, ), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async, ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest", None), ], ) -def test_gateway_service_client_client_options_credentials_file( +def test_gateway_control_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
@@ -1004,12 +1026,12 @@ def test_gateway_service_client_client_options_credentials_file( ) -def test_gateway_service_client_client_options_from_dict(): +def test_gateway_control_client_client_options_from_dict(): with mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceGrpcTransport.__init__" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None - client = GatewayServiceClient( + client = GatewayControlClient( client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( @@ -1029,20 +1051,20 @@ def test_gateway_service_client_client_options_from_dict(): "client_class,transport_class,transport_name,grpc_helpers", [ ( - GatewayServiceClient, - transports.GatewayServiceGrpcTransport, + GatewayControlClient, + transports.GatewayControlGrpcTransport, "grpc", grpc_helpers, ), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async, ), ], ) -def test_gateway_service_client_create_channel_credentials_file( +def test_gateway_control_client_create_channel_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -1097,12 +1119,12 @@ def test_gateway_service_client_create_channel_credentials_file( @pytest.mark.parametrize( "request_type", [ - httpbody_pb2.HttpBody, + control.GenerateCredentialsRequest, dict, ], ) -def test_get_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( +def test_generate_credentials(request_type, transport: str = "grpc"): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1112,49 +1134,53 @@ def test_get_resource(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", + call.return_value = control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) - response = client.get_resource(request) + response = client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() + request = control.GenerateCredentialsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" + assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" -def test_get_resource_empty_call(): +def test_generate_credentials_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
- client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource() + client.generate_credentials() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + assert args[0] == control.GenerateCredentialsRequest() -def test_get_resource_non_empty_request_with_auto_populated_field(): +def test_generate_credentials_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1162,28 +1188,34 @@ def test_get_resource_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", + request = control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource(request=request) + client.generate_credentials(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", + assert args[0] == control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", ) -def test_get_resource_use_cached_wrapped_rpc(): +def test_generate_credentials_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1193,21 +1225,25 @@ def test_get_resource_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_resource in client._transport._wrapped_methods + assert ( + client._transport.generate_credentials in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_resource] = mock_rpc + client._transport._wrapped_methods[ + client._transport.generate_credentials + ] = mock_rpc request = {} - client.get_resource(request) + client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource(request) + client.generate_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1215,37 +1251,39 @@ def test_get_resource_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_resource_empty_call_async(): +async def test_generate_credentials_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) ) - response = await client.get_resource() + response = await client.generate_credentials() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + assert args[0] == control.GenerateCredentialsRequest() @pytest.mark.asyncio -async def test_get_resource_async_use_cached_wrapped_rpc( +async def test_generate_credentials_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1256,7 +1294,7 @@ async def test_get_resource_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_resource + client._client._transport.generate_credentials in client._client._transport._wrapped_methods ) @@ -1264,16 +1302,16 @@ async def test_get_resource_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_resource + client._client._transport.generate_credentials ] = mock_rpc request = {} - await client.get_resource(request) + await client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_resource(request) + await client.generate_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1281,10 +1319,10 @@ async def test_get_resource_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody +async def test_generate_credentials_async( + transport: str = "grpc_asyncio", request_type=control.GenerateCredentialsRequest ): - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1294,145 +1332,149 @@ async def test_get_resource_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) ) - response = await client.get_resource(request) + response = await client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() + request = control.GenerateCredentialsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" + assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" @pytest.mark.asyncio -async def test_get_resource_async_from_dict(): - await test_get_resource_async(request_type=dict) +async def test_generate_credentials_async_from_dict(): + await test_generate_credentials_async(request_type=dict) -def test_get_resource_from_dict_foreign(): - client = GatewayServiceClient( +def test_generate_credentials_field_headers(): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody() - response = client.get_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_post_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.post_resource(request) + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = control.GenerateCredentialsResponse() + client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_post_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( +@pytest.mark.asyncio +async def test_generate_credentials_field_headers_async(): + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse() ) - client.post_resource() - call.assert_called() + await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_post_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( + +@pytest.mark.parametrize( + "request_type", + [ + control.GenerateCredentialsRequest, + dict, + ], +) +def test_generate_credentials_rest(request_type): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = httpbody_pb2.HttpBody( - content_type="content_type_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/memberships/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.post_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.GenerateCredentialsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_credentials(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" + -def test_post_resource_use_cached_wrapped_rpc(): +def test_generate_credentials_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1440,965 +1482,291 @@ def test_post_resource_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.post_resource in client._transport._wrapped_methods + assert ( + client._transport.generate_credentials in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.post_resource] = mock_rpc + client._transport._wrapped_methods[ + client._transport.generate_credentials + ] = mock_rpc + request = {} - client.post_resource(request) + client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.post_resource(request) + client.generate_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_post_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_generate_credentials_rest_required_fields( + request_type=control.GenerateCredentialsRequest, +): + transport_class = transports.GatewayControlRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.post_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_credentials._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_post_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["name"] = "name_value" - # Ensure method has been cached - assert ( - client._client._transport.post_resource - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_credentials._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force_use_agent", + "kubernetes_namespace", + "operating_system", + "version", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.post_resource - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - request = {} - await client.post_resource(request) + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control.GenerateCredentialsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + response_value = Response() + response_value.status_code = 200 - await client.post_resource(request) + # Convert return value to protobuf type + return_value = control.GenerateCredentialsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_credentials(request) -@pytest.mark.asyncio -async def test_post_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", +def test_generate_credentials_rest_unset_required_fields(): + transport = transports.GatewayControlRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_credentials._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "forceUseAgent", + "kubernetesNamespace", + "operatingSystem", + "version", ) ) - response = await client.post_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_post_resource_async_from_dict(): - await test_post_resource_async(request_type=dict) + & set(("name",)) + ) -def test_post_resource_from_dict_foreign(): - client = GatewayServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_credentials_rest_interceptors(null_interceptor): + transport = transports.GatewayControlRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GatewayControlRestInterceptor(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = httpbody_pb2.HttpBody() - response = client.post_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } + client = GatewayControlClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GatewayControlRestInterceptor, "post_generate_credentials" + ) as post, mock.patch.object( + transports.GatewayControlRestInterceptor, "pre_generate_credentials" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control.GenerateCredentialsRequest.pb( + control.GenerateCredentialsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control.GenerateCredentialsResponse.to_json( + control.GenerateCredentialsResponse() + ) + + request = control.GenerateCredentialsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control.GenerateCredentialsResponse() + + client.generate_credentials( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - call.assert_called() + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_delete_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( + +def test_generate_credentials_rest_bad_request( + transport: str = "rest", request_type=control.GenerateCredentialsRequest +): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/memberships/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.delete_resource(request) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_credentials(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" +def test_generate_credentials_rest_error(): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) -def test_delete_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GatewayControlGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with pytest.raises(ValueError): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client.delete_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - -def test_delete_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.GatewayControlGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", + # It is an error to provide an api_key and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, + transport=transport, ) - -def test_delete_resource_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_resource in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # It is an error to provide scopes and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) - client._transport._wrapped_methods[client._transport.delete_resource] = mock_rpc - request = {} - client.delete_resource(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - client.delete_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.delete_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -@pytest.mark.asyncio -async def test_delete_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_resource - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_resource - ] = mock_rpc - - request = {} - await client.delete_resource(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.delete_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_delete_resource_async_from_dict(): - await test_delete_resource_async(request_type=dict) - - -def test_delete_resource_from_dict_foreign(): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody() - response = client.delete_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() - - -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_put_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -def test_put_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.put_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -def test_put_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.put_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - -def test_put_resource_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.put_resource in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.put_resource] = mock_rpc - request = {} - client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.put_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_put_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.put_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -@pytest.mark.asyncio -async def test_put_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.put_resource - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.put_resource - ] = mock_rpc - - request = {} - await client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.put_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_put_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_put_resource_async_from_dict(): - await test_put_resource_async(request_type=dict) - - -def test_put_resource_from_dict_foreign(): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = httpbody_pb2.HttpBody() - response = client.put_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() - - -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_patch_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -def test_patch_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.patch_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -def test_patch_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.patch_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - -def test_patch_resource_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.patch_resource in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.patch_resource] = mock_rpc - request = {} - client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.patch_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_patch_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.patch_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -@pytest.mark.asyncio -async def test_patch_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.patch_resource - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.patch_resource - ] = mock_rpc - - request = {} - await client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.patch_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_patch_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_patch_resource_async_from_dict(): - await test_patch_resource_async(request_type=dict) - - -def test_patch_resource_from_dict_foreign(): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody() - response = client.patch_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = GatewayServiceClient(transport=transport) - assert client.transport is transport +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GatewayControlClient(transport=transport) + assert client.transport is transport def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. - transport = transports.GatewayServiceGrpcTransport( + transport = transports.GatewayControlGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel - transport = transports.GatewayServiceGrpcAsyncIOTransport( + transport = transports.GatewayControlGrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel @@ -2408,8 +1776,9 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + transports.GatewayControlRestTransport, ], ) def test_transport_adc(transport_class): @@ -2424,10 +1793,11 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): - transport = GatewayServiceClient.get_transport_class(transport_name)( + transport = GatewayControlClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name @@ -2435,43 +1805,37 @@ def test_transport_kind(transport_name): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, - transports.GatewayServiceGrpcTransport, + transports.GatewayControlGrpcTransport, ) -def test_gateway_service_base_transport_error(): +def test_gateway_control_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.GatewayServiceTransport( + transport = transports.GatewayControlTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) -def test_gateway_service_base_transport(): +def test_gateway_control_base_transport(): # Instantiate the base transport. 
with mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceTransport.__init__" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlTransport.__init__" ) as Transport: Transport.return_value = None - transport = transports.GatewayServiceTransport( + transport = transports.GatewayControlTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. - methods = ( - "get_resource", - "post_resource", - "delete_resource", - "put_resource", - "patch_resource", - ) + methods = ("generate_credentials",) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -2488,16 +1852,16 @@ def test_gateway_service_base_transport(): getattr(transport, r)() -def test_gateway_service_base_transport_with_credentials_file(): +def test_gateway_control_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceTransport._prep_wrapped_messages" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GatewayServiceTransport( + transport = transports.GatewayControlTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2509,22 +1873,22 @@ def test_gateway_service_base_transport_with_credentials_file(): ) -def test_gateway_service_base_transport_with_adc(): +def test_gateway_control_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceTransport._prep_wrapped_messages" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GatewayServiceTransport() + transport = transports.GatewayControlTransport() adc.assert_called_once() -def test_gateway_service_auth_adc(): +def test_gateway_control_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GatewayServiceClient() + GatewayControlClient() adc.assert_called_once_with( scopes=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2535,11 +1899,11 @@ def test_gateway_service_auth_adc(): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_transport_auth_adc(transport_class): +def test_gateway_control_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2555,11 +1919,12 @@ def test_gateway_service_transport_auth_adc(transport_class): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + transports.GatewayControlRestTransport, ], ) -def test_gateway_service_transport_auth_gdch_credentials(transport_class): +def test_gateway_control_transport_auth_gdch_credentials(transport_class): host = "https://language.com/" api_audience_tests = [None, "https://language2.com/"] api_audience_expect = [host, "https://language2.com/"] @@ -2577,11 +1942,11 @@ def test_gateway_service_transport_auth_gdch_credentials(transport_class): @pytest.mark.parametrize( "transport_class,grpc_helpers", [ - (transports.GatewayServiceGrpcTransport, grpc_helpers), - (transports.GatewayServiceGrpcAsyncIOTransport, grpc_helpers_async), + (transports.GatewayControlGrpcTransport, grpc_helpers), + (transports.GatewayControlGrpcAsyncIOTransport, grpc_helpers_async), ], ) -def test_gateway_service_transport_create_channel(transport_class, grpc_helpers): +def test_gateway_control_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. with mock.patch.object( @@ -2612,11 +1977,11 @@ def test_gateway_service_transport_create_channel(transport_class, grpc_helpers) @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_grpc_transport_client_cert_source_for_mtls(transport_class): +def test_gateway_control_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
@@ -2654,22 +2019,38 @@ def test_gateway_service_grpc_transport_client_cert_source_for_mtls(transport_cl ) +def test_gateway_control_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.GatewayControlRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) -def test_gateway_service_host_no_port(transport_name): - client = GatewayServiceClient( +def test_gateway_control_host_no_port(transport_name): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="connectgateway.googleapis.com" ), transport=transport_name, ) - assert client.transport._host == ("connectgateway.googleapis.com:443") + assert client.transport._host == ( + "connectgateway.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com/" + ) @pytest.mark.parametrize( @@ -2677,24 +2058,51 @@ def test_gateway_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) -def test_gateway_service_host_with_port(transport_name): - client = GatewayServiceClient( +def test_gateway_control_host_with_port(transport_name): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="connectgateway.googleapis.com:8000" ), transport=transport_name, ) - assert client.transport._host == ("connectgateway.googleapis.com:8000") + assert client.transport._host == ( + "connectgateway.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_gateway_control_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GatewayControlClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GatewayControlClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_credentials._session + session2 = client2.transport.generate_credentials._session + assert session1 != session2 -def test_gateway_service_grpc_transport_channel(): +def test_gateway_control_grpc_transport_channel(): channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. - transport = transports.GatewayServiceGrpcTransport( + transport = transports.GatewayControlGrpcTransport( host="squid.clam.whelk", channel=channel, ) @@ -2703,11 +2111,11 @@ def test_gateway_service_grpc_transport_channel(): assert transport._ssl_channel_credentials == None -def test_gateway_service_grpc_asyncio_transport_channel(): +def test_gateway_control_grpc_asyncio_transport_channel(): channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
- transport = transports.GatewayServiceGrpcAsyncIOTransport( + transport = transports.GatewayControlGrpcAsyncIOTransport( host="squid.clam.whelk", channel=channel, ) @@ -2721,11 +2129,11 @@ def test_gateway_service_grpc_asyncio_transport_channel(): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_transport_channel_mtls_with_client_cert_source( +def test_gateway_control_transport_channel_mtls_with_client_cert_source( transport_class, ): with mock.patch( @@ -2775,11 +2183,11 @@ def test_gateway_service_transport_channel_mtls_with_client_cert_source( @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_transport_channel_mtls_with_adc(transport_class): +def test_gateway_control_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", @@ -2821,7 +2229,7 @@ def test_common_billing_account_path(): expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = GatewayServiceClient.common_billing_account_path(billing_account) + actual = GatewayControlClient.common_billing_account_path(billing_account) assert expected == actual @@ -2829,10 +2237,10 @@ def test_parse_common_billing_account_path(): expected = { "billing_account": "clam", } - path = GatewayServiceClient.common_billing_account_path(**expected) + path = GatewayControlClient.common_billing_account_path(**expected) # Check that the path construction is reversible. - actual = GatewayServiceClient.parse_common_billing_account_path(path) + actual = GatewayControlClient.parse_common_billing_account_path(path) assert expected == actual @@ -2841,7 +2249,7 @@ def test_common_folder_path(): expected = "folders/{folder}".format( folder=folder, ) - actual = GatewayServiceClient.common_folder_path(folder) + actual = GatewayControlClient.common_folder_path(folder) assert expected == actual @@ -2849,10 +2257,10 @@ def test_parse_common_folder_path(): expected = { "folder": "octopus", } - path = GatewayServiceClient.common_folder_path(**expected) + path = GatewayControlClient.common_folder_path(**expected) # Check that the path construction is reversible. - actual = GatewayServiceClient.parse_common_folder_path(path) + actual = GatewayControlClient.parse_common_folder_path(path) assert expected == actual @@ -2861,7 +2269,7 @@ def test_common_organization_path(): expected = "organizations/{organization}".format( organization=organization, ) - actual = GatewayServiceClient.common_organization_path(organization) + actual = GatewayControlClient.common_organization_path(organization) assert expected == actual @@ -2869,10 +2277,10 @@ def test_parse_common_organization_path(): expected = { "organization": "nudibranch", } - path = GatewayServiceClient.common_organization_path(**expected) + path = GatewayControlClient.common_organization_path(**expected) # Check that the path construction is reversible. 
- actual = GatewayServiceClient.parse_common_organization_path(path) + actual = GatewayControlClient.parse_common_organization_path(path) assert expected == actual @@ -2881,7 +2289,7 @@ def test_common_project_path(): expected = "projects/{project}".format( project=project, ) - actual = GatewayServiceClient.common_project_path(project) + actual = GatewayControlClient.common_project_path(project) assert expected == actual @@ -2889,10 +2297,10 @@ def test_parse_common_project_path(): expected = { "project": "mussel", } - path = GatewayServiceClient.common_project_path(**expected) + path = GatewayControlClient.common_project_path(**expected) # Check that the path construction is reversible. - actual = GatewayServiceClient.parse_common_project_path(path) + actual = GatewayControlClient.parse_common_project_path(path) assert expected == actual @@ -2903,7 +2311,7 @@ def test_common_location_path(): project=project, location=location, ) - actual = GatewayServiceClient.common_location_path(project, location) + actual = GatewayControlClient.common_location_path(project, location) assert expected == actual @@ -2912,10 +2320,10 @@ def test_parse_common_location_path(): "project": "scallop", "location": "abalone", } - path = GatewayServiceClient.common_location_path(**expected) + path = GatewayControlClient.common_location_path(**expected) # Check that the path construction is reversible. - actual = GatewayServiceClient.parse_common_location_path(path) + actual = GatewayControlClient.parse_common_location_path(path) assert expected == actual @@ -2923,18 +2331,18 @@ def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( - transports.GatewayServiceTransport, "_prep_wrapped_messages" + transports.GatewayControlTransport, "_prep_wrapped_messages" ) as prep: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( - transports.GatewayServiceTransport, "_prep_wrapped_messages" + transports.GatewayControlTransport, "_prep_wrapped_messages" ) as prep: - transport_class = GatewayServiceClient.get_transport_class() + transport_class = GatewayControlClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, @@ -2944,7 +2352,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) @@ -2958,11 +2366,12 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } for transport, close_name in transports.items(): - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -2975,10 +2384,11 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
@@ -2992,8 +2402,8 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport), - (GatewayServiceAsyncClient, transports.GatewayServiceGrpcAsyncIOTransport), + (GatewayControlClient, transports.GatewayControlGrpcTransport), + (GatewayControlAsyncClient, transports.GatewayControlGrpcAsyncIOTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py new file mode 100644 index 000000000000..024863c9d32c --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py @@ -0,0 +1,2090 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control import ( + GatewayControlAsyncClient, + GatewayControlClient, + transports, +) +from google.cloud.gkeconnect.gateway_v1beta1.types import control + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GatewayControlClient._get_default_mtls_endpoint(None) is None + assert ( + GatewayControlClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert GatewayControlClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert GatewayControlClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + GatewayControlClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + GatewayControlClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert GatewayControlClient._get_client_cert_source(None, False) is None + assert ( + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + GatewayControlClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + GatewayControlClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + GatewayControlClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + GatewayControlClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + GatewayControlClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, default_universe, "always") + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GatewayControlClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + GatewayControlClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + GatewayControlClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + GatewayControlClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + GatewayControlClient._get_universe_domain(None, None) + == GatewayControlClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + GatewayControlClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+    assert client._validate_universe_domain() == True
+
+    if transport_name == "grpc":
+        # Test the case where credentials are provided by the
+        # `local_channel_credentials`. The default universes in both match.
+        channel = grpc.secure_channel(
+            "http://localhost/", grpc.local_channel_credentials()
+        )
+        client = client_class(transport=transport_class(channel=channel))
+        assert client._validate_universe_domain() == True
+
+        # Test the case where credentials do not exist: e.g. a transport is provided
+        # with no credentials. Validation should still succeed because there is no
+        # mismatch with non-existent credentials.
+        channel = grpc.secure_channel(
+            "http://localhost/", grpc.local_channel_credentials()
+        )
+        transport = transport_class(channel=channel)
+        transport._credentials = None
+        client = client_class(transport=transport)
+        assert client._validate_universe_domain() == True
+
+    # TODO: This is needed to cater for older versions of google-auth
+    # Make this test unconditional once the minimum supported version of
+    # google-auth becomes 2.23.0 or higher.
+    google_auth_major, google_auth_minor = [
+        int(part) for part in google.auth.__version__.split(".")[0:2]
+    ]
+    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
+        credentials = ga_credentials.AnonymousCredentials()
+        credentials._universe_domain = "foo.com"
+        # Test the case when there is a universe mismatch from the credentials.
+        client = client_class(transport=transport_class(credentials=credentials))
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert (
+            str(excinfo.value)
+            == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+        )
+
+        # Test the case when there is a universe mismatch from the client.
+        #
+        # TODO: Make this test unconditional once the minimum supported version of
+        # google-api-core becomes 2.15.0 or higher.
+        api_core_major, api_core_minor = [
+            int(part) for part in api_core_version.__version__.split(".")[0:2]
+        ]
+        if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
+            client = client_class(
+                client_options={"universe_domain": "bar.com"},
+                transport=transport_class(
+                    credentials=ga_credentials.AnonymousCredentials(),
+                ),
+            )
+            with pytest.raises(ValueError) as excinfo:
+                client._validate_universe_domain()
+            assert (
+                str(excinfo.value)
+                == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + ], +) +def test_gateway_control_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("connectgateway.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.GatewayControlGrpcTransport, "grpc"), + (transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_gateway_control_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + ], +) +def test_gateway_control_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("connectgateway.googleapis.com:443") + + +def test_gateway_control_client_get_transport_class(): + transport = GatewayControlClient.get_transport_class() + available_transports = [ + transports.GatewayControlGrpcTransport, + ] + assert transport in available_transports + + transport = GatewayControlClient.get_transport_class("grpc") + assert transport == transports.GatewayControlGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + GatewayControlClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(GatewayControlAsyncClient), +) +def test_gateway_control_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+        )
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com/"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com/",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc", "true"),
+        (
+            GatewayControlAsyncClient,
+            transports.GatewayControlGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc", "false"),
+        (
+            GatewayControlAsyncClient,
+            transports.GatewayControlGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    GatewayControlClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(GatewayControlClient),
+)
+@mock.patch.object(
+    GatewayControlAsyncClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(GatewayControlAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_gateway_control_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [GatewayControlClient, GatewayControlAsyncClient] +) +@mock.patch.object( + GatewayControlClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GatewayControlAsyncClient), +) +def test_gateway_control_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [GatewayControlClient, GatewayControlAsyncClient] +) +@mock.patch.object( + GatewayControlClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlAsyncClient), +) +def test_gateway_control_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_gateway_control_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GatewayControlClient, + transports.GatewayControlGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_gateway_control_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_gateway_control_client_client_options_from_dict(): + with mock.patch( + "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GatewayControlClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GatewayControlClient, + transports.GatewayControlGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_gateway_control_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "connectgateway.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=None,
+            default_host="connectgateway.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        control.GenerateCredentialsRequest,
+        dict,
+    ],
+)
+def test_generate_credentials(request_type, transport: str = "grpc"):
+    client = GatewayControlClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_credentials), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = control.GenerateCredentialsResponse(
+            kubeconfig=b"kubeconfig_blob",
+            endpoint="endpoint_value",
+        )
+        response = client.generate_credentials(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = control.GenerateCredentialsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, control.GenerateCredentialsResponse)
+    assert response.kubeconfig == b"kubeconfig_blob"
+    assert response.endpoint == "endpoint_value"
+
+
+def test_generate_credentials_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = GatewayControlClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_credentials), "__call__"
+    ) as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client.generate_credentials()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == control.GenerateCredentialsRequest()
+
+
+def test_generate_credentials_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = GatewayControlClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+ request = control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.generate_credentials(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", + ) + + +def test_generate_credentials_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_credentials in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_credentials + ] = mock_rpc + request = {} + client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_credentials(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_credentials_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", + ) + ) + response = await client.generate_credentials() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control.GenerateCredentialsRequest() + + +@pytest.mark.asyncio +async def test_generate_credentials_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_credentials + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_credentials + ] = mock_rpc + + request = {} + await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_credentials(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_credentials_async( + transport: str = "grpc_asyncio", request_type=control.GenerateCredentialsRequest +): + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", + ) + ) + response = await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control.GenerateCredentialsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" + + +@pytest.mark.asyncio +async def test_generate_credentials_async_from_dict(): + await test_generate_credentials_async(request_type=dict) + + +def test_generate_credentials_field_headers(): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = control.GenerateCredentialsResponse() + client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_credentials_field_headers_async(): + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse() + ) + await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GatewayControlClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GatewayControlGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = GatewayControlClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GatewayControlGrpcTransport, + ) + + +def test_gateway_control_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GatewayControlTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_gateway_control_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GatewayControlTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+    methods = ("generate_credentials",)
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        "kind",
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_gateway_control_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.GatewayControlTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+def test_gateway_control_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+        "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.GatewayControlTransport()
+        adc.assert_called_once()
+
+
+def test_gateway_control_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        GatewayControlClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.GatewayControlGrpcTransport,
+        transports.GatewayControlGrpcAsyncIOTransport,
+    ],
+)
+def test_gateway_control_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.GatewayControlGrpcTransport,
+        transports.GatewayControlGrpcAsyncIOTransport,
+    ],
+)
+def test_gateway_control_transport_auth_gdch_credentials(transport_class):
+    host = "https://language.com/"
+    api_audience_tests = [None, "https://language2.com/"]
+    api_audience_expect = [host, "https://language2.com/"]
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, "default", autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(
+                return_value=gdch_mock
+            )
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.GatewayControlGrpcTransport, grpc_helpers),
+        (transports.GatewayControlGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+def test_gateway_control_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "connectgateway.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="connectgateway.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.GatewayControlGrpcTransport,
+        transports.GatewayControlGrpcAsyncIOTransport,
+    ],
+)
+def test_gateway_control_grpc_transport_client_cert_source_for_mtls(transport_class):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_gateway_control_host_no_port(transport_name):
+    client = GatewayControlClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="connectgateway.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("connectgateway.googleapis.com:443")
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_gateway_control_host_with_port(transport_name):
+    client = GatewayControlClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="connectgateway.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("connectgateway.googleapis.com:8000")
+
+
+def test_gateway_control_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.GatewayControlGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+def test_gateway_control_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.GatewayControlGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_gateway_control_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_gateway_control_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GatewayControlClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = GatewayControlClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GatewayControlClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GatewayControlClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GatewayControlClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GatewayControlClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GatewayControlClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GatewayControlClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GatewayControlClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = GatewayControlClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GatewayControlClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GatewayControlClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GatewayControlClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GatewayControlClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GatewayControlClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.GatewayControlTransport, "_prep_wrapped_messages" + ) as prep: + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.GatewayControlTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = GatewayControlClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport), + (GatewayControlAsyncClient, transports.GatewayControlGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 48eb58513cbb3faf45fe64c8f9c25ccb706e6e48 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Thu, 22 Aug 2024 11:28:49 -0700 Subject: [PATCH 031/108] chore: Update release-please config files (#13030) Update release-please config files --- release-please-config.json | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/release-please-config.json b/release-please-config.json index fcf90e2a8f7e..ea9a89e5e9fc 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -1542,7 +1542,13 @@ "component": "google-cloud-gke-connect-gateway", "extra-files": [ "google/cloud/gkeconnect/gateway/gapic_version.py", + "google/cloud/gkeconnect/gateway_v1/gapic_version.py", "google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json", + "type": "json" + }, { "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json", From 812331cb25611c1cf068af903a41735b608d2e13 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 18:32:13 +0000 Subject: [PATCH 032/108] chore: release main (#13029) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
google-cloud-gke-connect-gateway: 0.9.0 ## [0.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-connect-gateway-v0.8.11...google-cloud-gke-connect-gateway-v0.9.0) (2024-08-22) ### ⚠ BREAKING CHANGES * [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service * existing client libraries are being regenerated to remove unused functionality and introduce new features. ### Features * [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service ([6639798](https://github.com/googleapis/google-cloud-python/commit/6639798f019e86e72ce6cd5a2c837320439cb2b6)) ### Bug Fixes * Set google.cloud.gkeconnect.gateway_v1 as the default import ([6639798](https://github.com/googleapis/google-cloud-python/commit/6639798f019e86e72ce6cd5a2c837320439cb2b6))
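For readers tracking the breaking change above: the regenerated library now exposes a `GatewayControlClient` in place of the old GatewayService surface, with `google.cloud.gkeconnect.gateway_v1` as the default import. A minimal construction sketch, mirroring the endpoint and transport values asserted in the regenerated unit tests earlier in this series (AnonymousCredentials and the endpoint are for local experimentation only):

```python
# Minimal sketch of the new GatewayControl surface; constructor arguments mirror
# the regenerated unit tests in this series. AnonymousCredentials is for local
# experimentation only.
from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.gkeconnect import gateway_v1

client = gateway_v1.GatewayControlClient(
    credentials=ga_credentials.AnonymousCredentials(),
    client_options=client_options.ClientOptions(
        api_endpoint="connectgateway.googleapis.com"
    ),
    transport="grpc",
)
# The tests in this series assert that the resolved host gains the default port:
assert client.transport._host == "connectgateway.googleapis.com:443"
```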
google-cloud-recaptcha-enterprise: 1.22.0 ## [1.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.21.2...google-cloud-recaptcha-enterprise-v1.22.0) (2024-08-22) ### Features * add `express_settings` to `Key` ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) * add AddIpOverride RPC ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) ### Documentation * clarify `Event.express` field ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) * fix billing, quota, and usecase links ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30))
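The AddIpOverride feature above lands as a new RPC on the reCAPTCHA Enterprise service; in GAPIC terms that surfaces as an `add_ip_override` method on the generated client. The sketch below is only an illustration of that shape: the request and field names (`AddIpOverrideRequest`, `ip_override_data`, `IpOverrideData`, `OverrideType.ALLOW`) and the key path are assumptions inferred from the release note, not taken from this diff.

```python
# Hypothetical sketch of calling the new AddIpOverride RPC. Message and field
# names here (AddIpOverrideRequest, ip_override_data, IpOverrideData,
# OverrideType.ALLOW) are assumptions inferred from the release note above.
from google.cloud import recaptchaenterprise_v1

client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient()
request = recaptchaenterprise_v1.AddIpOverrideRequest(
    name="projects/my-project/keys/my-key",  # placeholder key resource name
    ip_override_data=recaptchaenterprise_v1.IpOverrideData(
        ip="203.0.113.7",  # placeholder address from the documentation range
        override_type=recaptchaenterprise_v1.IpOverrideData.OverrideType.ALLOW,
    ),
)
client.add_ip_override(request=request)
```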
google-shopping-css: 0.1.8 ## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-shopping-css-v0.1.7...google-shopping-css-v0.1.8) (2024-08-22) ### Documentation * [google-shopping-css] update `Certification` field descriptions ([#13027](https://github.com/googleapis/google-cloud-python/issues/13027)) ([70e2dd5](https://github.com/googleapis/google-cloud-python/commit/70e2dd5f024dd5c94a5e02b442bbab7e6e5f38fe))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .release-please-manifest.json | 6 +++--- .../CHANGELOG.md | 17 +++++++++++++++++ .../cloud/gkeconnect/gateway/gapic_version.py | 2 +- .../gkeconnect/gateway_v1beta1/gapic_version.py | 2 +- ...google.cloud.gkeconnect.gateway.v1beta1.json | 2 +- .../CHANGELOG.md | 14 ++++++++++++++ .../cloud/recaptchaenterprise/gapic_version.py | 2 +- .../recaptchaenterprise_v1/gapic_version.py | 2 +- ...ata_google.cloud.recaptchaenterprise.v1.json | 2 +- packages/google-shopping-css/CHANGELOG.md | 7 +++++++ .../google/shopping/css/gapic_version.py | 2 +- .../google/shopping/css_v1/gapic_version.py | 2 +- ...snippet_metadata_google.shopping.css.v1.json | 2 +- 13 files changed, 50 insertions(+), 12 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 18795917ffb4..85b9607f3fcb 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -89,7 +89,7 @@ "packages/google-cloud-functions": "1.17.0", "packages/google-cloud-gdchardwaremanagement": "0.1.3", "packages/google-cloud-gke-backup": "0.5.11", - "packages/google-cloud-gke-connect-gateway": "0.8.11", + "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", "packages/google-cloud-gke-multicloud": "0.6.12", "packages/google-cloud-gsuiteaddons": "0.3.10", @@ -129,7 +129,7 @@ "packages/google-cloud-privilegedaccessmanager": "0.1.1", "packages/google-cloud-public-ca": "0.3.12", "packages/google-cloud-rapidmigrationassessment": "0.1.9", - "packages/google-cloud-recaptcha-enterprise": "1.21.2", + "packages/google-cloud-recaptcha-enterprise": "1.22.0", "packages/google-cloud-recommendations-ai": "0.10.12", "packages/google-cloud-recommender": "2.15.5", "packages/google-cloud-redis": "2.15.5", @@ -184,7 +184,7 @@ "packages/google-maps-routeoptimization": "0.1.2", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", - "packages/google-shopping-css": "0.1.7", + "packages/google-shopping-css": "0.1.8", "packages/google-shopping-merchant-accounts": "0.1.3", "packages/google-shopping-merchant-conversions": "0.1.3", "packages/google-shopping-merchant-datasources": "0.1.2", diff --git a/packages/google-cloud-gke-connect-gateway/CHANGELOG.md b/packages/google-cloud-gke-connect-gateway/CHANGELOG.md index 3912730be4d5..3d4aa240a4d4 100644 --- a/packages/google-cloud-gke-connect-gateway/CHANGELOG.md +++ b/packages/google-cloud-gke-connect-gateway/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [0.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-connect-gateway-v0.8.11...google-cloud-gke-connect-gateway-v0.9.0) (2024-08-22) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service +* existing client libraries are being regenerated to remove unused functionality and introduce new features. 
+ +### Features + +* [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service ([6639798](https://github.com/googleapis/google-cloud-python/commit/6639798f019e86e72ce6cd5a2c837320439cb2b6)) + + +### Bug Fixes + +* Set google.cloud.gkeconnect.gateway_v1 as the default import ([6639798](https://github.com/googleapis/google-cloud-python/commit/6639798f019e86e72ce6cd5a2c837320439cb2b6)) + ## [0.8.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-connect-gateway-v0.8.10...google-cloud-gke-connect-gateway-v0.8.11) (2024-07-30) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py index 558c8aab67c5..a4e5ba3ce496 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py index 558c8aab67c5..a4e5ba3ce496 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json index f57662ce8498..7ab5b8ced60e 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-connect-gateway", - "version": "0.1.0" + "version": "0.9.0" }, "snippets": [ { diff --git a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md index cef8c7414214..0fb0e97fd517 100644 --- a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md +++ b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.21.2...google-cloud-recaptcha-enterprise-v1.22.0) (2024-08-22) + + +### Features + +* add `express_settings` to `Key` ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) +* add AddIpOverride RPC ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) + + +### Documentation + +* clarify `Event.express` field ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) +* fix billing, quota, and usecase links ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) + ## [1.21.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.21.1...google-cloud-recaptcha-enterprise-v1.21.2) (2024-07-30) diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py index 558c8aab67c5..8b80cf328714 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py index 558c8aab67c5..8b80cf328714 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index b2ad3f987f98..cc98231dae95 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recaptcha-enterprise", - "version": "0.1.0" + "version": "1.22.0" }, "snippets": [ { diff --git a/packages/google-shopping-css/CHANGELOG.md b/packages/google-shopping-css/CHANGELOG.md index e18689dcec95..9150653216a2 100644 --- a/packages/google-shopping-css/CHANGELOG.md +++ b/packages/google-shopping-css/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-shopping-css-v0.1.7...google-shopping-css-v0.1.8) (2024-08-22) + + +### Documentation + +* [google-shopping-css] update `Certification` field descriptions ([#13027](https://github.com/googleapis/google-cloud-python/issues/13027)) ([70e2dd5](https://github.com/googleapis/google-cloud-python/commit/70e2dd5f024dd5c94a5e02b442bbab7e6e5f38fe)) + ## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-shopping-css-v0.1.6...google-shopping-css-v0.1.7) (2024-07-30) diff --git a/packages/google-shopping-css/google/shopping/css/gapic_version.py b/packages/google-shopping-css/google/shopping/css/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-shopping-css/google/shopping/css/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json index 5b682921bee5..301d93f4bbc4 100644 --- a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json +++ b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-css", - "version": "0.1.0" + "version": "0.1.8" }, "snippets": [ { From 46ef3250151c5939100ab4a1dcda6ccf68ea9e4c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 10:00:11 -0400 Subject: [PATCH 033/108] docs: [google-cloud-texttospeech] update Long Audio capabilities to include SSML (#13031) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 666797967 Source-Link: https://github.com/googleapis/googleapis/commit/818d9fd16dfa254048240ea045b94b6757b20091 Source-Link: https://github.com/googleapis/googleapis-gen/commit/983f7c82cf98b8a6a39bbb254717b0b5c484e15b Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXRleHR0b3NwZWVjaC8uT3dsQm90LnlhbWwiLCJoIjoiOTgzZjdjODJjZjk4YjhhNmEzOWJiYjI1NDcxN2IwYjVjNDg0ZTE1YiJ9 --------- Co-authored-by: Owl Bot --- .../google/cloud/texttospeech/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1beta1/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py | 3 +-- .../snippet_metadata_google.cloud.texttospeech.v1.json | 2 +- .../snippet_metadata_google.cloud.texttospeech.v1beta1.json | 2 +- 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 6fecc94eb049..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 6fecc94eb049..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 6fecc94eb049..558c8aab67c5 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py index 7617ef4b5f77..67b70c095524 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py @@ -42,8 +42,7 @@ class SynthesizeLongAudioRequest(proto.Message): ``projects/*/locations/*``. input (google.cloud.texttospeech_v1beta1.types.SynthesisInput): Required. The Synthesizer requires either - plain text or SSML as input. While Long Audio is - in preview, SSML is temporarily unsupported. + plain text or SSML as input. audio_config (google.cloud.texttospeech_v1beta1.types.AudioConfig): Required. The configuration of the synthesized audio. diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index ac836bbdf339..f87785fcdd45 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.17.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index c0a9e929493b..2877853b66c5 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.17.1" + "version": "0.1.0" }, "snippets": [ { From ee7248f6baa3d09482ab2b8afd2d12db50861d6f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 14:08:23 +0000 Subject: [PATCH 034/108] chore: release main (#13033) :robot: I have created a release *beep* *boop* ---
google-cloud-texttospeech: 2.17.2 ## [2.17.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.17.1...google-cloud-texttospeech-v2.17.2) (2024-08-26) ### Documentation * [google-cloud-texttospeech] update Long Audio capabilities to include SSML ([#13031](https://github.com/googleapis/google-cloud-python/issues/13031)) ([46ef325](https://github.com/googleapis/google-cloud-python/commit/46ef3250151c5939100ab4a1dcda6ccf68ea9e4c))
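Since the #13031 docs change applied above removes the "SSML is temporarily unsupported" caveat from `SynthesizeLongAudioRequest`, here is a minimal sketch of a Long Audio request driven by SSML input. The project, location, voice, and GCS bucket are placeholders, and the client and field names are the standard `texttospeech_v1` long-audio surface rather than anything introduced by this diff.

```python
# Minimal sketch: Long Audio synthesis with SSML input, which the docs change
# above no longer describes as unsupported. Project, bucket and voice are placeholders.
from google.cloud import texttospeech_v1 as texttospeech

client = texttospeech.TextToSpeechLongAudioSynthesizeClient()
request = texttospeech.SynthesizeLongAudioRequest(
    parent="projects/my-project/locations/us-central1",
    input=texttospeech.SynthesisInput(
        ssml="<speak>Chapter one.<break time='750ms'/>It was a quiet morning.</speak>"
    ),
    voice=texttospeech.VoiceSelectionParams(
        language_code="en-US", name="en-US-Standard-A"
    ),
    audio_config=texttospeech.AudioConfig(
        audio_encoding=texttospeech.AudioEncoding.LINEAR16
    ),
    output_gcs_uri="gs://my-bucket/long-audio-output.wav",
)
operation = client.synthesize_long_audio(request=request)
operation.result(timeout=600)  # long-running operation; blocks until the audio is written
```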
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- packages/google-cloud-texttospeech/CHANGELOG.md | 7 +++++++ .../google/cloud/texttospeech/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1/gapic_version.py | 2 +- .../google/cloud/texttospeech_v1beta1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.texttospeech.v1.json | 2 +- ...snippet_metadata_google.cloud.texttospeech.v1beta1.json | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 85b9607f3fcb..0d0833f003e9 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -158,7 +158,7 @@ "packages/google-cloud-talent": "2.13.5", "packages/google-cloud-tasks": "2.16.5", "packages/google-cloud-telcoautomation": "0.2.5", - "packages/google-cloud-texttospeech": "2.17.1", + "packages/google-cloud-texttospeech": "2.17.2", "packages/google-cloud-tpu": "1.18.5", "packages/google-cloud-trace": "1.13.5", "packages/google-cloud-translate": "3.16.0", diff --git a/packages/google-cloud-texttospeech/CHANGELOG.md b/packages/google-cloud-texttospeech/CHANGELOG.md index 14211a0b99aa..0ebabf8433ec 100644 --- a/packages/google-cloud-texttospeech/CHANGELOG.md +++ b/packages/google-cloud-texttospeech/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-texttospeech/#history +## [2.17.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.17.1...google-cloud-texttospeech-v2.17.2) (2024-08-26) + + +### Documentation + +* [google-cloud-texttospeech] update Long Audio capabilities to include SSML ([#13031](https://github.com/googleapis/google-cloud-python/issues/13031)) ([46ef325](https://github.com/googleapis/google-cloud-python/commit/46ef3250151c5939100ab4a1dcda6ccf68ea9e4c)) + ## [2.17.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.17.0...google-cloud-texttospeech-v2.17.1) (2024-08-20) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 558c8aab67c5..b2378ad73fca 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 558c8aab67c5..b2378ad73fca 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 558c8aab67c5..b2378ad73fca 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index f87785fcdd45..bf52a9d09886 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "0.1.0" + "version": "2.17.2" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index 2877853b66c5..f3e0f230302f 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "0.1.0" + "version": "2.17.2" }, "snippets": [ { From 957778b3fd25529bbb3f85dac3d9922a50fdaad9 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 26 Aug 2024 16:11:03 +0200 Subject: [PATCH 035/108] chore(deps): update all dependencies (#13032) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [cachetools](https://togithub.com/tkem/cachetools) | `==5.4.0` -> `==5.5.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/cachetools/5.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/cachetools/5.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/cachetools/5.4.0/5.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/cachetools/5.4.0/5.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [cffi](http://cffi.readthedocs.org) ([source](https://togithub.com/python-cffi/cffi), [changelog](https://cffi.readthedocs.io/en/latest/whatsnew.html)) | `==1.16.0` -> `==1.17.0` | 
[![age](https://developer.mend.io/api/mc/badges/age/pypi/cffi/1.17.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/cffi/1.17.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/cffi/1.16.0/1.17.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/cffi/1.16.0/1.17.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | `==2.32.0` -> `==2.34.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/google-auth/2.34.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/google-auth/2.34.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/google-auth/2.32.0/2.34.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/google-auth/2.32.0/2.34.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [idna](https://togithub.com/kjd/idna) ([changelog](https://togithub.com/kjd/idna/blob/master/HISTORY.rst)) | `==3.7` -> `==3.8` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/idna/3.8?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/idna/3.8?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/idna/3.7/3.8?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/idna/3.7/3.8?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [importlib-metadata](https://togithub.com/python/importlib_metadata) | `==8.2.0` -> `==8.4.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/importlib-metadata/8.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/importlib-metadata/8.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/importlib-metadata/8.2.0/8.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/importlib-metadata/8.2.0/8.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [jaraco-context](https://togithub.com/jaraco/jaraco.context) | `==5.3.0` -> `==6.0.1` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/jaraco-context/6.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/jaraco-context/6.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/jaraco-context/5.3.0/6.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/jaraco-context/5.3.0/6.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [keyring](https://togithub.com/jaraco/keyring) | `==25.2.1` -> `==25.3.0` | 
[![age](https://developer.mend.io/api/mc/badges/age/pypi/keyring/25.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/keyring/25.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/keyring/25.2.1/25.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/keyring/25.2.1/25.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [more-itertools](https://togithub.com/more-itertools/more-itertools) | `==10.3.0` -> `==10.4.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/more-itertools/10.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/more-itertools/10.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/more-itertools/10.3.0/10.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/more-itertools/10.3.0/10.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [zipp](https://togithub.com/jaraco/zipp) | `==3.19.2` -> `==3.20.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/zipp/3.20.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/zipp/3.20.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/zipp/3.19.2/3.20.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/zipp/3.19.2/3.20.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
tkem/cachetools (cachetools) ### [`v5.5.0`](https://togithub.com/tkem/cachetools/blob/HEAD/CHANGELOG.rst#v550-2024-08-18) [Compare Source](https://togithub.com/tkem/cachetools/compare/v5.4.0...v5.5.0) \=================== - `TTLCache.expire()` returns iterable of expired `(key, value)` pairs. - `TLRUCache.expire()` returns iterable of expired `(key, value)` pairs. - Documentation improvements. - Update CI environment.
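The first cachetools bullet is the behavioural one: `TTLCache.expire()` (and `TLRUCache.expire()`) now hand back the evicted `(key, value)` pairs instead of discarding them silently. A small sketch, assuming the 5.5.0 behaviour described in the changelog above:

```python
# Small sketch of the cachetools 5.5.0 change noted above:
# TTLCache.expire() now returns the expired (key, value) pairs.
import time

from cachetools import TTLCache

cache = TTLCache(maxsize=16, ttl=0.05)
cache["a"] = 1
cache["b"] = 2
time.sleep(0.1)                 # let both entries outlive their TTL
expired = list(cache.expire())  # e.g. [('a', 1), ('b', 2)] on 5.5.0
print(expired)
```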
python-cffi/cffi (cffi) ### [`v1.17.0`](https://togithub.com/python-cffi/cffi/releases/tag/v1.17.0) [Compare Source](https://togithub.com/python-cffi/cffi/compare/v1.16.0...v1.17.0) - Add support for Python 3.13. - Free-threaded CPython builds (i.e. `python3.13t` and the `3.13t` ABI) are not currently supported. - In API mode, when you get a function from a C library by writing `fn = lib.myfunc`, you get an object of a special type for performance reasons, instead of a `<cdata>`. Before version 1.17 you could only call such objects. You could write `ffi.addressof(lib, "myfunc")` in order to get a real `<cdata>` object, based on the idea that in these cases in C you'd usually write `&myfunc` instead of `myfunc`. In version 1.17, the special object `lib.myfunc` can now be passed in many places where CFFI expects a regular `<cdata>` object. For example, you can now pass it as a callback to a C function call, or write it inside a C structure field of the correct pointer-to-function type, or use `ffi.cast()` or `ffi.typeof()` on it. **Full Changelog**: https://github.com/python-cffi/cffi/compare/v1.16.0...v1.17.0
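To make the cffi 1.17 bullet concrete: in API (out-of-line) mode `lib.myfunc` is a special fast object, and 1.17 lets it be used wherever a regular `<cdata>` is expected. The sketch below builds a throwaway extension at run time, so it assumes a C compiler is available on PATH; the module and function names (`_cffi_demo`, `my_add`) are invented for the example.

```python
# Sketch of the cffi 1.17 change above. Requires a C compiler on PATH;
# _cffi_demo and my_add are throwaway names invented for this example.
from cffi import FFI

ffibuilder = FFI()
ffibuilder.cdef("double my_add(double, double);")
ffibuilder.set_source("_cffi_demo", "double my_add(double a, double b) { return a + b; }")
ffibuilder.compile(verbose=False)

from _cffi_demo import ffi, lib  # import the freshly built extension

print(lib.my_add(2.0, 3.5))      # callable as before
print(ffi.typeof(lib.my_add))    # allowed directly on lib.myfunc since 1.17
fn_ptr = ffi.cast("double(*)(double, double)", lib.my_add)  # also new in 1.17
print(fn_ptr(1.0, 2.0))
```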
googleapis/google-auth-library-python (google-auth) ### [`v2.34.0`](https://togithub.com/googleapis/google-auth-library-python/blob/HEAD/CHANGELOG.md#2340-2024-08-13) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v2.33.0...v2.34.0) ##### Features - **auth:** Update get_client_ssl_credentials to support X.509 workload certs ([#​1558](https://togithub.com/googleapis/google-auth-library-python/issues/1558)) ([18c2ec1](https://togithub.com/googleapis/google-auth-library-python/commit/18c2ec1b571d506c0dbcffc483aa5e7b95e1b246)) ##### Bug Fixes - Retry token request on retryable status code ([#​1563](https://togithub.com/googleapis/google-auth-library-python/issues/1563)) ([f858a15](https://togithub.com/googleapis/google-auth-library-python/commit/f858a151cb7e29d34578e03c9e3fd4110c6bc258)) ### [`v2.33.0`](https://togithub.com/googleapis/google-auth-library-python/blob/HEAD/CHANGELOG.md#2330-2024-08-06) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v2.32.0...v2.33.0) ##### Features - Implement async `StaticCredentials` using access tokens ([#​1559](https://togithub.com/googleapis/google-auth-library-python/issues/1559)) ([dc17dfc](https://togithub.com/googleapis/google-auth-library-python/commit/dc17dfc3fb65c87f2912300f0d11f79781240e78)) - Implement base classes for credentials and request sessions ([#​1551](https://togithub.com/googleapis/google-auth-library-python/issues/1551)) ([036dac4](https://togithub.com/googleapis/google-auth-library-python/commit/036dac43018b8cc26b5608e1bb21d6e3ee62a282)) ##### Bug Fixes - **metadata:** Enhance retry logic for metadata server access in \_metadata.py ([#​1545](https://togithub.com/googleapis/google-auth-library-python/issues/1545)) ([61c2432](https://togithub.com/googleapis/google-auth-library-python/commit/61c24321e52f6e017eecee211e11260d621c909b)) ##### Documentation - Update argument for Credentials initialization ([#​1557](https://togithub.com/googleapis/google-auth-library-python/issues/1557)) ([40b9ed9](https://togithub.com/googleapis/google-auth-library-python/commit/40b9ed91a6b01948561cfc71edaaabdd7f362f17))
kjd/idna (idna) ### [`v3.8`](https://togithub.com/kjd/idna/releases/tag/v3.8) [Compare Source](https://togithub.com/kjd/idna/compare/v3.7...v3.8) #### What's Changed - Fix regression where IDNAError exception was not being produced for certain inputs. - Add support for Python 3.13, drop support for Python 3.5 as it is no longer testable. - Documentation improvements - Updates to package testing using Github actions Thanks to Hugo van Kemenade for contributions to this release. **Full Changelog**: https://github.com/kjd/idna/compare/v3.7...v3.8
python/importlib_metadata (importlib-metadata) ### [`v8.4.0`](https://togithub.com/python/importlib_metadata/compare/v8.3.0...v8.4.0) [Compare Source](https://togithub.com/python/importlib_metadata/compare/v8.3.0...v8.4.0) ### [`v8.3.0`](https://togithub.com/python/importlib_metadata/compare/v8.2.0...v8.3.0) [Compare Source](https://togithub.com/python/importlib_metadata/compare/v8.2.0...v8.3.0)
jaraco/jaraco.context (jaraco-context) ### [`v6.0.1`](https://togithub.com/jaraco/jaraco.context/compare/v6.0.0...v6.0.1) [Compare Source](https://togithub.com/jaraco/jaraco.context/compare/v6.0.0...v6.0.1) ### [`v6.0.0`](https://togithub.com/jaraco/jaraco.context/compare/v5.3.0...v6.0.0) [Compare Source](https://togithub.com/jaraco/jaraco.context/compare/v5.3.0...v6.0.0)
jaraco/keyring (keyring) ### [`v25.3.0`](https://togithub.com/jaraco/keyring/compare/v25.2.1...v25.3.0) [Compare Source](https://togithub.com/jaraco/keyring/compare/v25.2.1...v25.3.0)
more-itertools/more-itertools (more-itertools) ### [`v10.4.0`](https://togithub.com/more-itertools/more-itertools/releases/tag/v10.4.0): Version 10.4.0 [Compare Source](https://togithub.com/more-itertools/more-itertools/compare/v10.3.0...v10.4.0) ##### What's Changed - Issue 854: sample improvements by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/855](https://togithub.com/more-itertools/more-itertools/pull/855) - Issue 858: Use chain and starmap in run_length.decode by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/861](https://togithub.com/more-itertools/more-itertools/pull/861) - Issue 859: Update totient recipe by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/860](https://togithub.com/more-itertools/more-itertools/pull/860) - Distinct permutations of incomparable items by [@​JamesParrott](https://togithub.com/JamesParrott) in [https://github.com/more-itertools/more-itertools/pull/834](https://togithub.com/more-itertools/more-itertools/pull/834) - Clarify seekable.relative_seek behavior by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/863](https://togithub.com/more-itertools/more-itertools/pull/863) - Issue 864: Improve \_sample_unweighted by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/865](https://togithub.com/more-itertools/more-itertools/pull/865) - Use log1p for \_sample_unweighted by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/868](https://togithub.com/more-itertools/more-itertools/pull/868) - Issue 862: change relative_seek() behaviour by [@​dkrikun](https://togithub.com/dkrikun) in [https://github.com/more-itertools/more-itertools/pull/866](https://togithub.com/more-itertools/more-itertools/pull/866) - Issue 876: is_sorted clarifications by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/877](https://togithub.com/more-itertools/more-itertools/pull/877) - Issue 870: counts parameter for sample by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/875](https://togithub.com/more-itertools/more-itertools/pull/875) - Issue 869: Add a steps argument to circular_shifts by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/874](https://togithub.com/more-itertools/more-itertools/pull/874) - Issue 871: Add a fast path for sliding_window by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/873](https://togithub.com/more-itertools/more-itertools/pull/873) - type annotation of `windowed_complete` corrected by [@​m472](https://togithub.com/m472) in [https://github.com/more-itertools/more-itertools/pull/881](https://togithub.com/more-itertools/more-itertools/pull/881) - \[Docs] Fix strictly_n missing the n parameter by [@​fakuivan](https://togithub.com/fakuivan) in [https://github.com/more-itertools/more-itertools/pull/886](https://togithub.com/more-itertools/more-itertools/pull/886) - Standardize type hints for isinstance's second argument by [@​jbosboom](https://togithub.com/jbosboom) in [https://github.com/more-itertools/more-itertools/pull/887](https://togithub.com/more-itertools/more-itertools/pull/887) - Issue 883: change type hint by 
[@​akisatoon1](https://togithub.com/akisatoon1) in [https://github.com/more-itertools/more-itertools/pull/884](https://togithub.com/more-itertools/more-itertools/pull/884) - Add type overloads for `zip_broadcast` by [@​Pandede](https://togithub.com/Pandede) in [https://github.com/more-itertools/more-itertools/pull/888](https://togithub.com/more-itertools/more-itertools/pull/888) - Issue 889: Optimize triplewise by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/891](https://togithub.com/more-itertools/more-itertools/pull/891) - Add option `strict` to `sort_together` by [@​Pandede](https://togithub.com/Pandede) in [https://github.com/more-itertools/more-itertools/pull/892](https://togithub.com/more-itertools/more-itertools/pull/892) - Updates for version 10.4.0 by [@​bbayles](https://togithub.com/bbayles) in [https://github.com/more-itertools/more-itertools/pull/893](https://togithub.com/more-itertools/more-itertools/pull/893) ##### New Contributors - [@​JamesParrott](https://togithub.com/JamesParrott) made their first contribution in [https://github.com/more-itertools/more-itertools/pull/834](https://togithub.com/more-itertools/more-itertools/pull/834) - [@​dkrikun](https://togithub.com/dkrikun) made their first contribution in [https://github.com/more-itertools/more-itertools/pull/866](https://togithub.com/more-itertools/more-itertools/pull/866) - [@​m472](https://togithub.com/m472) made their first contribution in [https://github.com/more-itertools/more-itertools/pull/881](https://togithub.com/more-itertools/more-itertools/pull/881) - [@​fakuivan](https://togithub.com/fakuivan) made their first contribution in [https://github.com/more-itertools/more-itertools/pull/886](https://togithub.com/more-itertools/more-itertools/pull/886) - [@​jbosboom](https://togithub.com/jbosboom) made their first contribution in [https://github.com/more-itertools/more-itertools/pull/887](https://togithub.com/more-itertools/more-itertools/pull/887) - [@​akisatoon1](https://togithub.com/akisatoon1) made their first contribution in [https://github.com/more-itertools/more-itertools/pull/884](https://togithub.com/more-itertools/more-itertools/pull/884) - [@​Pandede](https://togithub.com/Pandede) made their first contribution in [https://github.com/more-itertools/more-itertools/pull/888](https://togithub.com/more-itertools/more-itertools/pull/888) **Full Changelog**: https://github.com/more-itertools/more-itertools/compare/v10.3.0...v10.4.0
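Two of the 10.4.0 items above are new keyword arguments; a rough sketch follows, with the parameter names (`counts=` on `sample`, `steps=` on `circular_shifts`) taken from the linked PR titles rather than from anything in this repository.

```python
# Rough sketch of two more-itertools 10.4.0 additions listed above; the keyword
# names (counts= on sample, steps= on circular_shifts) come from the PR titles.
from more_itertools import circular_shifts, sample

# counts= expands each item by its multiplicity before sampling.
picked = sample(["red", "blue"], k=3, counts=[4, 1])
print(picked)  # three colours drawn as if from ['red', 'red', 'red', 'red', 'blue']

# steps= takes circular shifts two positions at a time instead of one.
print(circular_shifts(range(4), steps=2))  # e.g. [(0, 1, 2, 3), (2, 3, 0, 1)]
```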
jaraco/zipp (zipp) ### [`v3.20.0`](https://togithub.com/jaraco/zipp/compare/v3.19.3...v3.20.0) [Compare Source](https://togithub.com/jaraco/zipp/compare/v3.19.3...v3.20.0) ### [`v3.19.3`](https://togithub.com/jaraco/zipp/compare/v3.19.2...v3.19.3) [Compare Source](https://togithub.com/jaraco/zipp/compare/v3.19.2...v3.19.3)
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://togithub.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View the [repository job log](https://developer.mend.io/github/googleapis/google-cloud-python). Co-authored-by: Anthonios Partheniou --- .kokoro/requirements-aoss.txt | 168 +++++++++++++++++++--------------- 1 file changed, 93 insertions(+), 75 deletions(-) diff --git a/.kokoro/requirements-aoss.txt b/.kokoro/requirements-aoss.txt index 34b8f631e421..eede56b4ce68 100644 --- a/.kokoro/requirements-aoss.txt +++ b/.kokoro/requirements-aoss.txt @@ -8,67 +8,82 @@ backports-tarfile==1.2.0 \ --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 # via jaraco-context -cachetools==5.4.0 \ - --hash=sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474 \ - --hash=sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth certifi==2024.7.4 \ --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ 
- --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 +cffi==1.17.0 \ + --hash=sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f \ + --hash=sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab \ + --hash=sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499 \ + --hash=sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058 \ + --hash=sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693 \ + --hash=sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb \ + --hash=sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377 \ + --hash=sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885 \ + --hash=sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2 \ + --hash=sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401 \ + 
--hash=sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4 \ + --hash=sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b \ + --hash=sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59 \ + --hash=sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f \ + --hash=sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c \ + --hash=sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555 \ + --hash=sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa \ + --hash=sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424 \ + --hash=sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb \ + --hash=sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2 \ + --hash=sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8 \ + --hash=sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e \ + --hash=sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9 \ + --hash=sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82 \ + --hash=sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828 \ + --hash=sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759 \ + --hash=sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc \ + --hash=sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118 \ + --hash=sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf \ + --hash=sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932 \ + --hash=sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a \ + --hash=sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29 \ + --hash=sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206 \ + --hash=sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2 \ + --hash=sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c \ + --hash=sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c \ + --hash=sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0 \ + --hash=sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a \ + --hash=sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195 \ + --hash=sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6 \ + --hash=sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9 \ + --hash=sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc \ + --hash=sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb \ + --hash=sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0 \ + --hash=sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7 \ + --hash=sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb \ + --hash=sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a \ + --hash=sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492 \ + --hash=sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720 \ + --hash=sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42 \ + --hash=sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7 \ + --hash=sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d \ + 
--hash=sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d \ + --hash=sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb \ + --hash=sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4 \ + --hash=sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2 \ + --hash=sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b \ + --hash=sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8 \ + --hash=sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e \ + --hash=sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204 \ + --hash=sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3 \ + --hash=sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150 \ + --hash=sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4 \ + --hash=sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76 \ + --hash=sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e \ + --hash=sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb \ + --hash=sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91 # via cryptography charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ @@ -191,23 +206,23 @@ cryptography==43.0.0 \ --hash=sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5 \ --hash=sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0 # via secretstorage -google-auth==2.32.0 \ - --hash=sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022 \ - --hash=sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b +google-auth==2.34.0 \ + --hash=sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 \ + --hash=sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc # via keyrings-google-artifactregistry-auth -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 +idna==3.8 \ + --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ + --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 # via requests -importlib-metadata==8.2.0 \ - --hash=sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369 \ - --hash=sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d +importlib-metadata==8.4.0 \ + --hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \ + --hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5 # via keyring jaraco-classes==3.4.0 \ --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 # via keyring -jaraco-context==5.3.0 \ +jaraco-context==6.0.1 \ --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 # via keyring @@ -221,9 +236,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -keyring==25.2.1 \ - --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ - --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b +keyring==25.3.0 \ + --hash=sha256:8d85a1ea5d6db8515b59e1c5d1d1678b03cf7fc8b8dcfb1651e8c4a524eb42ef 
\ + --hash=sha256:8d963da00ccdf06e356acd9bf3b743208878751032d8599c6cc89eb51310ffae # via # -r requirements-aoss.in # keyrings-google-artifactregistry-auth @@ -231,9 +246,9 @@ keyrings-google-artifactregistry-auth==1.1.2 \ --hash=sha256:bd6abb72740d2dfeb4a5c03c3b105c6f7dba169caa29dee3959694f1f02c77de \ --hash=sha256:e3f18b50fa945c786593014dc225810d191671d4f5f8e12d9259e39bad3605a3 # via -r requirements-aoss.in -more-itertools==10.3.0 \ - --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ - --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 +more-itertools==10.4.0 \ + --hash=sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27 \ + --hash=sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923 # via # jaraco-classes # jaraco-functools @@ -271,7 +286,10 @@ urllib3==2.2.2 \ --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via requests -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c +zipp==3.20.0 \ + --hash=sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31 \ + --hash=sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d # via importlib-metadata +jaraco.context==6.0.1 \ + --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ + --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 From c54700d3e11e59eb5fae01fda25dbf3a9acbe382 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:23:31 -0400 Subject: [PATCH 036/108] feat: [google-cloud-service-management] Support local binding for variables with keyword name collision (#13034) BEGIN_COMMIT_OVERRIDE feat: Support local binding for variables with keyword name collision END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 667703338 Source-Link: https://github.com/googleapis/googleapis/commit/ec3826751c9b4117292e35aa812da35efe854877 Source-Link: https://github.com/googleapis/googleapis-gen/commit/5d929e3dce5ae1ee0dc7e45df0c797796b55b2ee Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNWQ5MjllM2RjZTVhZTFlZTBkYzdlNDVkZjBjNzk3Nzk2YjU1YjJlZSJ9 --------- Co-authored-by: Owl Bot --- .../unit/gapic/servicemanagement_v1/test_service_manager.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py index 12b5f383294f..10c05a61ed21 100644 --- a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py +++ b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py @@ -9286,7 +9286,10 @@ def test_create_service_config_rest(request_type): }, "cpp_settings": {"common": {}}, "php_settings": {"common": {}}, - "python_settings": {"common": {}}, + "python_settings": { + "common": {}, + "experimental_features": {"rest_async_io_enabled": True}, + }, "node_settings": {"common": {}}, "dotnet_settings": { "common": {}, From 7e40c8c40039e0c01ef9a8bfea8804edfee48d70 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:28:34 -0400 Subject: [PATCH 037/108] docs: [google-cloud-recaptcha-enterprise] minor doc fixes (#13039) BEGIN_COMMIT_OVERRIDE docs: minor doc fixes END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 668754916 Source-Link: https://github.com/googleapis/googleapis/commit/b99f12ae81cdfa330817274aed2b272e0c57c1c5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/43ba3fad89f71737dc1c561b3d8b5ca4585fe5b8 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlY2FwdGNoYS1lbnRlcnByaXNlLy5Pd2xCb3QueWFtbCIsImgiOiI0M2JhM2ZhZDg5ZjcxNzM3ZGMxYzU2MWIzZDhiNWNhNDU4NWZlNWI4In0= BEGIN_NESTED_COMMIT feat: [google-cloud-recaptcha-enterprise] add AssessmentEnvironment for CreateAssessement to explicitly describe the environment of the assessment PiperOrigin-RevId: 668000366 Source-Link: https://github.com/googleapis/googleapis/commit/14d1fe2996a0237d2be8de0e26b507760dba8ce4 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a53bb74c0a311860c70318509db1767dfeb5e580 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJlY2FwdGNoYS1lbnRlcnByaXNlLy5Pd2xCb3QueWFtbCIsImgiOiJhNTNiYjc0YzBhMzExODYwYzcwMzE4NTA5ZGIxNzY3ZGZlYjVlNTgwIn0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../cloud/recaptchaenterprise/__init__.py | 2 + .../recaptchaenterprise/gapic_version.py | 2 +- .../cloud/recaptchaenterprise_v1/__init__.py | 2 + .../recaptchaenterprise_v1/gapic_version.py | 2 +- .../async_client.py | 17 ++-- .../recaptcha_enterprise_service/client.py | 17 ++-- .../transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- .../recaptchaenterprise_v1/types/__init__.py | 2 + .../types/recaptchaenterprise.py | 99 +++++++++++++------ ...a_google.cloud.recaptchaenterprise.v1.json | 2 +- 11 files changed, 98 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py 
index a4192178f092..74085402cea4 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py @@ -34,6 +34,7 @@ AnnotateAssessmentResponse, AppleDeveloperId, Assessment, + AssessmentEnvironment, ChallengeMetrics, CreateAssessmentRequest, CreateFirewallPolicyRequest, @@ -102,6 +103,7 @@ "AnnotateAssessmentResponse", "AppleDeveloperId", "Assessment", + "AssessmentEnvironment", "ChallengeMetrics", "CreateAssessmentRequest", "CreateFirewallPolicyRequest", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py index 8b80cf328714..558c8aab67c5 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py index f71b86ba1dc9..872b50ab387a 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py @@ -32,6 +32,7 @@ AnnotateAssessmentResponse, AppleDeveloperId, Assessment, + AssessmentEnvironment, ChallengeMetrics, CreateAssessmentRequest, CreateFirewallPolicyRequest, @@ -99,6 +100,7 @@ "AnnotateAssessmentResponse", "AppleDeveloperId", "Assessment", + "AssessmentEnvironment", "ChallengeMetrics", "CreateAssessmentRequest", "CreateFirewallPolicyRequest", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py index 8b80cf328714..558c8aab67c5 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py index 8241b218d463..1ae940ad5ff1 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py @@ -344,7 +344,7 @@ async def sample_create_assessment(): message. parent (:class:`str`): Required. 
The name of the project in which the - assessment will be created, in the format + assessment is created, in the format ``projects/{project}``. This corresponds to the ``parent`` field @@ -471,7 +471,7 @@ async def sample_annotate_assessment(): on the ``request`` instance; if ``request`` is provided, this should not be set. annotation (:class:`google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Annotation`): - Optional. The annotation that will be + Optional. The annotation that is assigned to the Event. This field can be left empty to provide reasons that apply to an event without concluding whether @@ -587,8 +587,8 @@ async def sample_create_key(): request (Optional[Union[google.cloud.recaptchaenterprise_v1.types.CreateKeyRequest, dict]]): The request object. The create key request message. parent (:class:`str`): - Required. The name of the project in which the key will - be created, in the format ``projects/{project}``. + Required. The name of the project in which the key is + created, in the format ``projects/{project}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -705,8 +705,7 @@ async def sample_list_keys(): The request object. The list keys request message. parent (:class:`str`): Required. The name of the project that contains the keys - that will be listed, in the format - ``projects/{project}``. + that are listed, in the format ``projects/{project}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1052,7 +1051,7 @@ async def sample_update_key(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. The mask to control which fields of the key get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. This corresponds to the ``update_mask`` field @@ -1316,7 +1315,7 @@ async def add_ip_override( - The maximum number of IP overrides per key is 100. - For any conflict (such as IP already exists or IP part of an - existing IP range), an error will be returned. + existing IP range), an error is returned. .. code-block:: python @@ -1935,7 +1934,7 @@ async def sample_update_firewall_policy(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. The mask to control which fields of the policy get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. This corresponds to the ``update_mask`` field diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py index 3369fc67c142..bd62b5ca1e8c 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py @@ -829,7 +829,7 @@ def sample_create_assessment(): message. parent (str): Required. The name of the project in which the - assessment will be created, in the format + assessment is created, in the format ``projects/{project}``. This corresponds to the ``parent`` field @@ -953,7 +953,7 @@ def sample_annotate_assessment(): on the ``request`` instance; if ``request`` is provided, this should not be set. 
annotation (google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Annotation): - Optional. The annotation that will be + Optional. The annotation that is assigned to the Event. This field can be left empty to provide reasons that apply to an event without concluding whether @@ -1066,8 +1066,8 @@ def sample_create_key(): request (Union[google.cloud.recaptchaenterprise_v1.types.CreateKeyRequest, dict]): The request object. The create key request message. parent (str): - Required. The name of the project in which the key will - be created, in the format ``projects/{project}``. + Required. The name of the project in which the key is + created, in the format ``projects/{project}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1181,8 +1181,7 @@ def sample_list_keys(): The request object. The list keys request message. parent (str): Required. The name of the project that contains the keys - that will be listed, in the format - ``projects/{project}``. + that are listed, in the format ``projects/{project}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1523,7 +1522,7 @@ def sample_update_key(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the key get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. This corresponds to the ``update_mask`` field @@ -1779,7 +1778,7 @@ def add_ip_override( - The maximum number of IP overrides per key is 100. - For any conflict (such as IP already exists or IP part of an - existing IP range), an error will be returned. + existing IP range), an error is returned. .. code-block:: python @@ -2383,7 +2382,7 @@ def sample_update_firewall_policy(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the policy get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. This corresponds to the ``update_mask`` field diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py index 1e960bab7e13..bb598e78465f 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py @@ -504,7 +504,7 @@ def add_ip_override( - The maximum number of IP overrides per key is 100. - For any conflict (such as IP already exists or IP part of an - existing IP range), an error will be returned. + existing IP range), an error is returned. 
Returns: Callable[[~.AddIpOverrideRequest], diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py index d500b28f4a20..00886fe8df24 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py @@ -520,7 +520,7 @@ def add_ip_override( - The maximum number of IP overrides per key is 100. - For any conflict (such as IP already exists or IP part of an - existing IP range), an error will be returned. + existing IP range), an error is returned. Returns: Callable[[~.AddIpOverrideRequest], diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py index 77aa68a2862c..230f8d821762 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py @@ -23,6 +23,7 @@ AnnotateAssessmentResponse, AppleDeveloperId, Assessment, + AssessmentEnvironment, ChallengeMetrics, CreateAssessmentRequest, CreateFirewallPolicyRequest, @@ -89,6 +90,7 @@ "AnnotateAssessmentResponse", "AppleDeveloperId", "Assessment", + "AssessmentEnvironment", "ChallengeMetrics", "CreateAssessmentRequest", "CreateFirewallPolicyRequest", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py index c4612c09acf2..129df8657cdd 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py @@ -87,6 +87,7 @@ "RelatedAccountGroupMembership", "RelatedAccountGroup", "WafSettings", + "AssessmentEnvironment", "IpOverrideData", }, ) @@ -97,8 +98,8 @@ class CreateAssessmentRequest(proto.Message): Attributes: parent (str): - Required. The name of the project in which the assessment - will be created, in the format ``projects/{project}``. + Required. The name of the project in which the assessment is + created, in the format ``projects/{project}``. assessment (google.cloud.recaptchaenterprise_v1.types.Assessment): Required. The assessment details. """ @@ -281,11 +282,11 @@ class AnnotateAssessmentRequest(proto.Message): Required. The resource name of the Assessment, in the format ``projects/{project}/assessments/{assessment}``. annotation (google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Annotation): - Optional. The annotation that will be - assigned to the Event. This field can be left - empty to provide reasons that apply to an event - without concluding whether the event is - legitimate or fraudulent. + Optional. The annotation that is assigned to + the Event. This field can be left empty to + provide reasons that apply to an event without + concluding whether the event is legitimate or + fraudulent. 
reasons (MutableSequence[google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Reason]): Optional. Reasons for the annotation that are assigned to the event. @@ -684,6 +685,11 @@ class Assessment(proto.Message): Output only. Assessment returned when a site key, a token, and a phone number as ``user_id`` are provided. Account defender and SMS toll fraud protection need to be enabled. + assessment_environment (google.cloud.recaptchaenterprise_v1.types.AssessmentEnvironment): + Optional. The environment creating the + assessment. This describes your environment (the + system invoking CreateAssessment), NOT the + environment of your user. """ name: str = proto.Field( @@ -740,6 +746,11 @@ class Assessment(proto.Message): number=12, message="PhoneFraudAssessment", ) + assessment_environment: "AssessmentEnvironment" = proto.Field( + proto.MESSAGE, + number=14, + message="AssessmentEnvironment", + ) class Event(proto.Message): @@ -775,7 +786,7 @@ class Event(proto.Message): express (bool): Optional. Flag for a reCAPTCHA express request for an assessment without a token. If enabled, ``site_key`` must - reference an Express site key. + reference an express key. requested_uri (str): Optional. The URI resource the user requested that triggered an assessment. @@ -792,15 +803,14 @@ class Event(proto.Message): firewall_policy_evaluation (bool): Optional. Flag for enabling firewall policy config assessment. If this flag is enabled, the - firewall policy will be evaluated and a - suggested firewall action will be returned in - the response. + firewall policy is evaluated and a suggested + firewall action is returned in the response. transaction_data (google.cloud.recaptchaenterprise_v1.types.TransactionData): Optional. Data describing a payment transaction to be assessed. Sending this data - enables reCAPTCHA Enterprise Fraud Prevention - and the FraudPreventionAssessment component in - the response. + enables reCAPTCHA Fraud Prevention and the + FraudPreventionAssessment component in the + response. user_info (google.cloud.recaptchaenterprise_v1.types.UserInfo): Optional. Information about the user that generates this event, when they can be @@ -1732,7 +1742,7 @@ class CreateKeyRequest(proto.Message): Attributes: parent (str): - Required. The name of the project in which the key will be + Required. The name of the project in which the key is created, in the format ``projects/{project}``. key (google.cloud.recaptchaenterprise_v1.types.Key): Required. Information to create a reCAPTCHA @@ -1756,7 +1766,7 @@ class ListKeysRequest(proto.Message): Attributes: parent (str): Required. The name of the project that contains the keys - that will be listed, in the format ``projects/{project}``. + that are listed, in the format ``projects/{project}``. page_size (int): Optional. The maximum number of keys to return. Default is 10. Max limit is 1000. @@ -1844,7 +1854,7 @@ class UpdateKeyRequest(proto.Message): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the key get updated. If the mask is not present, - all fields will be updated. + all fields are updated. """ key: "Key" = proto.Field( @@ -1977,7 +1987,7 @@ class UpdateFirewallPolicyRequest(proto.Message): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the policy get updated. If the mask is not - present, all fields will be updated. + present, all fields are updated. 
""" firewall_policy: "FirewallPolicy" = proto.Field( @@ -2090,12 +2100,12 @@ class Metrics(proto.Message): start_time (google.protobuf.timestamp_pb2.Timestamp): Inclusive start time aligned to a day (UTC). score_metrics (MutableSequence[google.cloud.recaptchaenterprise_v1.types.ScoreMetrics]): - Metrics will be continuous and in order by - dates, and in the granularity of day. All Key - types should have score-based data. + Metrics are continuous and in order by dates, + and in the granularity of day. All Key types + should have score-based data. challenge_metrics (MutableSequence[google.cloud.recaptchaenterprise_v1.types.ChallengeMetrics]): - Metrics will be continuous and in order by - dates, and in the granularity of day. Only + Metrics are continuous and in order by dates, + and in the granularity of day. Only challenge-based keys (CHECKBOX, INVISIBLE), will have challenge-based data. """ @@ -2175,8 +2185,8 @@ class Key(proto.Message): This field is a member of `oneof`_ ``platform_settings``. express_settings (google.cloud.recaptchaenterprise_v1.types.ExpressKeySettings): - Settings for keys that can be used by - reCAPTCHA Express. + Settings specific to keys that can be used + for reCAPTCHA Express. This field is a member of `oneof`_ ``platform_settings``. labels (MutableMapping[str, str]): @@ -2594,13 +2604,12 @@ class FirewallPolicyAssessment(proto.Message): Attributes: error (google.rpc.status_pb2.Status): Output only. If the processing of a policy config fails, an - error will be populated and the firewall_policy will be left - empty. + error is populated and the firewall_policy is left empty. firewall_policy (google.cloud.recaptchaenterprise_v1.types.FirewallPolicy): Output only. The policy that matched the request. If more than one policy may match, this is the first match. If no policy matches the - incoming request, the policy field will be left + incoming request, the policy field is left empty. """ @@ -3199,6 +3208,40 @@ class WafService(proto.Enum): ) +class AssessmentEnvironment(proto.Message): + r"""The environment creating the assessment. This describes your + environment (the system invoking CreateAssessment), NOT the + environment of your user. + + Attributes: + client (str): + Optional. Identifies the client module + initiating the CreateAssessment request. This + can be the link to the client module's project. + Examples include: + + - + "github.com/GoogleCloudPlatform/recaptcha-enterprise-google-tag-manager" + - + "cloud.google.com/recaptcha/docs/implement-waf-akamai" + - + "cloud.google.com/recaptcha/docs/implement-waf-cloudflare" + - "wordpress.org/plugins/recaptcha-something". + version (str): + Optional. The version of the client module. + For example, "1.0.0". + """ + + client: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + class IpOverrideData(proto.Message): r"""Information about the IP or IP range override. 
diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index cc98231dae95..b2ad3f987f98 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recaptcha-enterprise", - "version": "1.22.0" + "version": "0.1.0" }, "snippets": [ { From 127e5c097b08042989c124ac4cdfb5147181855d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:33:02 -0400 Subject: [PATCH 038/108] feat: [google-cloud-dataproc]add optional parameters (tarball-access) in DiagnoseClusterRequest (#13044) BEGIN_COMMIT_OVERRIDE feat:add optional parameters (tarball-access) in DiagnoseClusterRequest END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 669375999 Source-Link: https://github.com/googleapis/googleapis/commit/6a474b31c53cc1797710206824a17b364a835d2d Source-Link: https://github.com/googleapis/googleapis-gen/commit/3e834b4e0df6957c72524c7b63d41c3068ebccb4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiIzZTgzNGI0ZTBkZjY5NTdjNzI1MjRjN2I2M2Q0MWMzMDY4ZWJjY2I0In0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../cloud/dataproc_v1/types/clusters.py | 28 +++++++++++++++++++ .../scripts/fixup_dataproc_v1_keywords.py | 2 +- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py index ec33ae97f36c..b6e60e1765c0 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -2397,6 +2397,10 @@ class DiagnoseClusterRequest(proto.Message): for the diagnostic tarball. If not specified, a task-specific directory in the cluster's staging bucket will be used. + tarball_access (google.cloud.dataproc_v1.types.DiagnoseClusterRequest.TarballAccess): + Optional. (Optional) The access type to the + diagnostic tarball. If not specified, falls back + to default access of the bucket diagnosis_interval (google.type.interval_pb2.Interval): Optional. Time interval in which diagnosis should be carried out on the cluster. @@ -2410,6 +2414,25 @@ class DiagnoseClusterRequest(proto.Message): performed. """ + class TarballAccess(proto.Enum): + r"""Defines who has access to the diagnostic tarball + + Values: + TARBALL_ACCESS_UNSPECIFIED (0): + Tarball Access unspecified. 
Falls back to + default access of the bucket + GOOGLE_CLOUD_SUPPORT (1): + Google Cloud Support group has read access to + the diagnostic tarball + GOOGLE_DATAPROC_DIAGNOSE (2): + Google Cloud Dataproc Diagnose service + account has read access to the diagnostic + tarball + """ + TARBALL_ACCESS_UNSPECIFIED = 0 + GOOGLE_CLOUD_SUPPORT = 1 + GOOGLE_DATAPROC_DIAGNOSE = 2 + project_id: str = proto.Field( proto.STRING, number=1, @@ -2426,6 +2449,11 @@ class DiagnoseClusterRequest(proto.Message): proto.STRING, number=4, ) + tarball_access: TarballAccess = proto.Field( + proto.ENUM, + number=5, + enum=TarballAccess, + ) diagnosis_interval: interval_pb2.Interval = proto.Field( proto.MESSAGE, number=6, diff --git a/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py index 0a6aa2e35d07..565665908c4f 100644 --- a/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py +++ b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py @@ -54,7 +54,7 @@ class dataprocCallTransformer(cst.CSTTransformer): 'delete_session': ('name', 'request_id', ), 'delete_session_template': ('name', ), 'delete_workflow_template': ('name', 'version', ), - 'diagnose_cluster': ('project_id', 'region', 'cluster_name', 'tarball_gcs_dir', 'diagnosis_interval', 'jobs', 'yarn_application_ids', ), + 'diagnose_cluster': ('project_id', 'region', 'cluster_name', 'tarball_gcs_dir', 'tarball_access', 'diagnosis_interval', 'jobs', 'yarn_application_ids', ), 'get_autoscaling_policy': ('name', ), 'get_batch': ('name', ), 'get_cluster': ('project_id', 'region', 'cluster_name', ), From 308de6b266e24a8996875736b66485d92f299401 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:14:36 -0400 Subject: [PATCH 039/108] docs: [google-cloud-edgenetwork] swap comments on `BONDED` and `NON_BONDED` enums in `BondingType` (#13043) BEGIN_COMMIT_OVERRIDE docs: swap comments on `BONDED` and `NON_BONDED` enums in `BondingType` END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 669345789 Source-Link: https://github.com/googleapis/googleapis/commit/7310c46ada62661d6a64c899738315f28feb031b Source-Link: https://github.com/googleapis/googleapis-gen/commit/59cdf5f52643f4252d391bd859ab28268236a1f4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWVkZ2VuZXR3b3JrLy5Pd2xCb3QueWFtbCIsImgiOiI1OWNkZjVmNTI2NDNmNDI1MmQzOTFiZDg1OWFiMjgyNjgyMzZhMWY0In0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/edgenetwork_v1/types/resources.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py index dee3539ac39e..af3183abd5d5 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py @@ -224,9 +224,9 @@ class BondingType(proto.Enum): have both bonded and non-bonded connectivity to machines. BONDED (1): - Single homed. - NON_BONDED (2): Multi homed. + NON_BONDED (2): + Single homed. 
""" BONDING_TYPE_UNSPECIFIED = 0 BONDED = 1 From ea71725d3fe3bde0afd775d20127bed958e8eb8e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:20:00 -0400 Subject: [PATCH 040/108] feat: [google-cloud-container] add `EXTENDED` enum value for `ReleaseChannel.Channel` (#13040) BEGIN_COMMIT_OVERRIDE feat: add `EXTENDED` enum value for `ReleaseChannel.Channel` feat: add ReleaseChannel EXTENDED value END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 668474656 Source-Link: https://github.com/googleapis/googleapis/commit/b1a9e6c1b07dcdc809b8b3ba2ceebf9c5c03f475 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c0109f32a0f9b9ce300566b3a7dd3b31b0c8cf82 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRhaW5lci8uT3dsQm90LnlhbWwiLCJoIjoiYzAxMDlmMzJhMGY5YjljZTMwMDU2NmIzYTdkZDNiMzFiMGM4Y2Y4MiJ9 BEGIN_NESTED_COMMIT feat: [google-cloud-container] add ReleaseChannel EXTENDED value PiperOrigin-RevId: 668105133 Source-Link: https://github.com/googleapis/googleapis/commit/0e9edd7617044f88b9667f9783729cd8e8b528e5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ec9af8f4fb10c8f89a2bcb36345fda2b3ef3675d Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRhaW5lci8uT3dsQm90LnlhbWwiLCJoIjoiZWM5YWY4ZjRmYjEwYzhmODlhMmJjYjM2MzQ1ZmRhMmIzZWYzNjc1ZCJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/container_v1/types/cluster_service.py | 6 ++++++ .../google/cloud/container_v1beta1/types/cluster_service.py | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py index e5d04d99d2eb..20d4fb52446a 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -8388,11 +8388,17 @@ class Channel(proto.Enum): Clusters subscribed to STABLE receive versions that are known to be stable and reliable in production. + EXTENDED (4): + Clusters subscribed to EXTENDED receive + extended support and availability for versions + which are known to be stable and reliable in + production. """ UNSPECIFIED = 0 RAPID = 1 REGULAR = 2 STABLE = 3 + EXTENDED = 4 channel: Channel = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py index 04e05220a931..f6187795c40e 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py @@ -10166,11 +10166,17 @@ class Channel(proto.Enum): Clusters subscribed to STABLE receive versions that are known to be stable and reliable in production. + EXTENDED (4): + Clusters subscribed to EXTENDED receive + extended support and availability for versions + which are known to be stable and reliable in + production. 
""" UNSPECIFIED = 0 RAPID = 1 REGULAR = 2 STABLE = 3 + EXTENDED = 4 channel: Channel = proto.Field( proto.ENUM, From 4f468fa598c51426ef31ef878f9c3b61f79802f9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:24:23 -0400 Subject: [PATCH 041/108] feat: [google-cloud-alloydb] support for enabling outbound public IP on an instance (#13035) BEGIN_COMMIT_OVERRIDE feat: support for enabling outbound public IP on an instance feat: support for getting outbound public IP addresses of an instance feat: support for setting maintenance update policy on a cluster feat: support for getting maintenance schedule of a cluster END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: support for getting outbound public IP addresses of an instance feat: support for setting maintenance update policy on a cluster feat: support for getting maintenance schedule of a cluster PiperOrigin-RevId: 667707701 Source-Link: https://github.com/googleapis/googleapis/commit/a1185ce21454c5e5dbcf56098bb430d7f209633b Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ca9eb3a69c2c33fca86b8f4150aba846924f9cb Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFsbG95ZGIvLk93bEJvdC55YW1sIiwiaCI6IjZjYTllYjNhNjljMmMzM2ZjYTg2YjhmNDE1MGFiYTg0NjkyNGY5Y2IifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/alloydb_v1beta/__init__.py | 4 + .../cloud/alloydb_v1beta/types/__init__.py | 4 + .../cloud/alloydb_v1beta/types/resources.py | 101 +++++++++++++++++- .../alloydb_v1beta/test_alloy_db_admin.py | 45 ++++++++ 4 files changed, 152 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py index 648260737ae8..209ee9e5b9ce 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py @@ -35,6 +35,8 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, SupportedDatabaseFlag, @@ -142,6 +144,8 @@ "ListSupportedDatabaseFlagsResponse", "ListUsersRequest", "ListUsersResponse", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "OperationMetadata", "PromoteClusterRequest", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py index 69269be33581..262b798ccfc1 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py @@ -29,6 +29,8 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, SupportedDatabaseFlag, @@ -95,6 +97,8 @@ "EncryptionConfig", "EncryptionInfo", "Instance", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "SslConfig", "SupportedDatabaseFlag", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index fd99ddd2630f..9957f9c6b926 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -40,6 +40,8 @@ "ContinuousBackupInfo", "BackupSource", 
"ContinuousBackupSource", + "MaintenanceUpdatePolicy", + "MaintenanceSchedule", "Cluster", "Instance", "ConnectionInfo", @@ -622,6 +624,69 @@ class ContinuousBackupSource(proto.Message): ) +class MaintenanceUpdatePolicy(proto.Message): + r"""MaintenanceUpdatePolicy defines the policy for system + updates. + + Attributes: + maintenance_windows (MutableSequence[google.cloud.alloydb_v1beta.types.MaintenanceUpdatePolicy.MaintenanceWindow]): + Preferred windows to perform maintenance. + Currently limited to 1. + """ + + class MaintenanceWindow(proto.Message): + r"""MaintenanceWindow specifies a preferred day and time for + maintenance. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Preferred day of the week for maintenance, + e.g. MONDAY, TUESDAY, etc. + start_time (google.type.timeofday_pb2.TimeOfDay): + Preferred time to start the maintenance + operation on the specified day. Maintenance will + start within 1 hour of this time. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + + maintenance_windows: MutableSequence[MaintenanceWindow] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=MaintenanceWindow, + ) + + +class MaintenanceSchedule(proto.Message): + r"""MaintenanceSchedule stores the maintenance schedule generated + from the MaintenanceUpdatePolicy, once a maintenance rollout is + triggered, if MaintenanceWindow is set, and if there is no + conflicting DenyPeriod. The schedule is cleared once the update + takes place. This field cannot be manually changed; modify the + MaintenanceUpdatePolicy instead. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The scheduled start time for the + maintenance. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + class Cluster(proto.Message): r"""A cluster is a collection of regional AlloyDB resources. It can include a primary instance and one or more read pool @@ -693,7 +758,7 @@ class Cluster(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project}/global/networks/{network_id}". This is + ``projects/{project}/global/networks/{network_id}``. This is required to create a cluster. Deprecated, use network_config.network instead. etag (str): @@ -752,6 +817,13 @@ class Cluster(proto.Message): specific to PRIMARY cluster. satisfies_pzs (bool): Output only. Reserved for future use. + maintenance_update_policy (google.cloud.alloydb_v1beta.types.MaintenanceUpdatePolicy): + Optional. The maintenance update policy + determines when to allow or deny updates. + maintenance_schedule (google.cloud.alloydb_v1beta.types.MaintenanceSchedule): + Output only. The maintenance schedule for the + cluster, generated for a specific rollout if a + maintenance window is set. """ class State(proto.Enum): @@ -830,7 +902,7 @@ class NetworkConfig(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project_number}/global/networks/{network_id}". + ``projects/{project_number}/global/networks/{network_id}``. This is required to create a cluster. 
allocated_ip_range (str): Optional. Name of the allocated IP range for the private IP @@ -1014,6 +1086,16 @@ class PrimaryConfig(proto.Message): proto.BOOL, number=30, ) + maintenance_update_policy: "MaintenanceUpdatePolicy" = proto.Field( + proto.MESSAGE, + number=32, + message="MaintenanceUpdatePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=37, + message="MaintenanceSchedule", + ) class Instance(proto.Message): @@ -1147,6 +1229,9 @@ class Instance(proto.Message): network_config (google.cloud.alloydb_v1beta.types.Instance.InstanceNetworkConfig): Optional. Instance level network configuration. + outbound_public_ip_addresses (MutableSequence[str]): + Output only. All outbound public IP addresses + configured for the instance. """ class State(proto.Enum): @@ -1514,6 +1599,10 @@ class InstanceNetworkConfig(proto.Message): enable_public_ip (bool): Optional. Enabling public ip for the instance. + enable_outbound_public_ip (bool): + Optional. Enabling an outbound public IP + address to support a database server sending + requests out into the internet. """ class AuthorizedNetwork(proto.Message): @@ -1542,6 +1631,10 @@ class AuthorizedNetwork(proto.Message): proto.BOOL, number=2, ) + enable_outbound_public_ip: bool = proto.Field( + proto.BOOL, + number=3, + ) name: str = proto.Field( proto.STRING, @@ -1669,6 +1762,10 @@ class AuthorizedNetwork(proto.Message): number=29, message=InstanceNetworkConfig, ) + outbound_public_ip_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=34, + ) class ConnectionInfo(proto.Message): diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index 510dd441d48d..ab12494b9268 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -4942,6 +4942,7 @@ def test_get_instance(request_type, transport: str = "grpc"): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) response = client.get_instance(request) @@ -4965,6 +4966,9 @@ def test_get_instance(request_type, transport: str = "grpc"): assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] def test_get_instance_empty_call(): @@ -5075,6 +5079,7 @@ async def test_get_instance_empty_call_async(): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) ) response = await client.get_instance() @@ -5155,6 +5160,7 @@ async def test_get_instance_async( reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) ) response = await client.get_instance(request) @@ -5179,6 +5185,9 @@ async def test_get_instance_async( assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] @pytest.mark.asyncio @@ -15285,6 +15294,10 @@ def test_create_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + 
}, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -15766,6 +15779,10 @@ def test_update_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -17093,6 +17110,10 @@ def test_create_secondary_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -17909,6 +17930,7 @@ def test_get_instance_rest(request_type): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) # Wrap the value into a proper Response obj @@ -17936,6 +17958,9 @@ def test_get_instance_rest(request_type): assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] def test_get_instance_rest_use_cached_wrapped_rpc(): @@ -18283,7 +18308,12 @@ def test_create_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -18759,7 +18789,12 @@ def test_create_secondary_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -19249,7 +19284,12 @@ def test_batch_create_instances_rest(request_type): {"cidr_range": "cidr_range_value"} ], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], }, "request_id": "request_id_value", "validate_only": True, @@ -19654,7 +19694,12 @@ def test_update_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency From 63a6de00b1c6e2b6289b4fa76468859c828cb363 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:27:38 -0400 Subject: [PATCH 042/108] feat(v1): [google-cloud-batch] promote block_project_ssh_keys support to batch v1 API (#13046) BEGIN_COMMIT_OVERRIDE feat(v1): promote block_project_ssh_keys support to batch v1 API END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 670424240 Source-Link: https://github.com/googleapis/googleapis/commit/4bf146f05883186375b4192b8c3b441398bab3d1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/38ece342ba3cdf07db6b5e3c37252ae8f3f7ade6 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiIzOGVjZTM0MmJhM2NkZjA3ZGI2YjVlM2MzNzI1MmFlOGYzZjdhZGU2In0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google/cloud/batch_v1/types/job.py | 24 +++++++++++++++++++ .../cloud/batch_v1alpha/gapic_version.py | 2 +- ...nippet_metadata_google.cloud.batch.v1.json | 2 +- ...t_metadata_google.cloud.batch.v1alpha.json | 2 +- .../unit/gapic/batch_v1/test_batch_service.py | 1 + 7 files changed, 30 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index e4b992ee0cd9..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.26" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index e4b992ee0cd9..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.26" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 47abe1466463..c11a34f16b56 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -810,6 +810,26 @@ class InstancePolicyOrTemplate(proto.Message): Optional. Set this field true if you want Batch to install Ops Agent on your behalf. Default is false. + block_project_ssh_keys (bool): + Optional. Set this field to ``true`` if you want Batch to + block project-level SSH keys from accessing this job's VMs. + Alternatively, you can configure the job to specify a VM + instance template that blocks project-level SSH keys. In + either case, Batch blocks project-level SSH keys while + creating the VMs for this job. + + Batch allows project-level SSH keys for a job's VMs only if + all the following are true: + + - This field is undefined or set to ``false``. + - The job's VM instance template (if any) doesn't block + project-level SSH keys. + + Notably, you can override this behavior by manually updating + a VM to block or allow project-level SSH keys. For more + information about blocking project-level SSH keys, see the + Compute Engine documentation: + https://cloud.google.com/compute/docs/connect/restrict-ssh-keys#block-keys """ policy: "AllocationPolicy.InstancePolicy" = proto.Field( @@ -831,6 +851,10 @@ class InstancePolicyOrTemplate(proto.Message): proto.BOOL, number=4, ) + block_project_ssh_keys: bool = proto.Field( + proto.BOOL, + number=5, + ) class NetworkInterface(proto.Message): r"""A network interface. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index e4b992ee0cd9..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.26" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 32f12c161ccb..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.26" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 889a0b1efe83..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.26" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 3a3cc5d39a91..d123a2803997 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -3867,6 +3867,7 @@ def test_create_job_rest(request_type): "instance_template": "instance_template_value", "install_gpu_drivers": True, "install_ops_agent": True, + "block_project_ssh_keys": True, } ], "service_account": { From 0321915e31c12f24e96b778b5b3814507ff547d6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:37:18 -0400 Subject: [PATCH 043/108] feat: [google-cloud-visionai] request client libraries for new languages (#13036) BEGIN_COMMIT_OVERRIDE feat: request client libraries for new languages feat: add BatchOperationStatus to import metadata docs: A comment for method `ClipAsset` in service `Warehouse` is changed docs: A comment for enum value `IN_PROGRESS` in enum `State` is changed docs: A comment for enum value `SUCCEEDED` in enum `State` is changed docs: A comment for enum value `FAILED` in enum `State` is changed docs: A comment for field `relevance` in message `.google.cloud.visionai.v1.SearchResultItem` is changed END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
feat: add BatchOperationStatus to import metadata docs: A comment for method `ClipAsset` in service `Warehouse` is changed docs: A comment for enum value `IN_PROGRESS` in enum `State` is changed docs: A comment for enum value `SUCCEEDED` in enum `State` is changed docs: A comment for enum value `FAILED` in enum `State` is changed docs: A comment for field `relevance` in message `.google.cloud.visionai.v1.SearchResultItem` is changed PiperOrigin-RevId: 667729001 Source-Link: https://github.com/googleapis/googleapis/commit/f17f11412a8770c52e7280fa0e66ab17cb2f3af9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/da60873b0b72c5750144c58b761218c8eb7f8cb4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXZpc2lvbmFpLy5Pd2xCb3QueWFtbCIsImgiOiJkYTYwODczYjBiNzJjNTc1MDE0NGM1OGI3NjEyMThjOGViN2Y4Y2I0In0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/visionai/__init__.py | 2 + .../google/cloud/visionai_v1/__init__.py | 2 + .../services/warehouse/async_client.py | 11 +- .../visionai_v1/services/warehouse/client.py | 11 +- .../services/warehouse/transports/grpc.py | 11 +- .../warehouse/transports/grpc_asyncio.py | 11 +- .../cloud/visionai_v1/types/__init__.py | 2 + .../cloud/visionai_v1/types/warehouse.py | 135 ++++++++++++++++-- .../unit/gapic/visionai_v1/test_warehouse.py | 68 +++++++++ 9 files changed, 225 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-visionai/google/cloud/visionai/__init__.py b/packages/google-cloud-visionai/google/cloud/visionai/__init__.py index ccaffd1a09fa..1849c4efa2ad 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai/__init__.py +++ b/packages/google-cloud-visionai/google/cloud/visionai/__init__.py @@ -276,6 +276,7 @@ AnnotationValue, Asset, AssetSource, + BatchOperationStatus, BoolValue, CircleArea, ClipAssetRequest, @@ -640,6 +641,7 @@ "AnnotationValue", "Asset", "AssetSource", + "BatchOperationStatus", "BoolValue", "CircleArea", "ClipAssetRequest", diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py index 82e8e3764093..732d7a958c52 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py @@ -250,6 +250,7 @@ AnnotationValue, Asset, AssetSource, + BatchOperationStatus, BoolValue, CircleArea, ClipAssetRequest, @@ -432,6 +433,7 @@ "AssetSource", "AttributeValue", "AutoscalingMetricSpec", + "BatchOperationStatus", "BatchRunProcessRequest", "BatchRunProcessResponse", "BigQueryConfig", diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py index e33e38726d0e..85335ef56434 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py @@ -4057,11 +4057,12 @@ async def clip_asset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> warehouse.ClipAssetResponse: - r"""Generates clips for downloading. The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. 
+ r"""Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. .. code-block:: python diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py index a7b0f4fa4c78..a8da521a4414 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py @@ -4562,11 +4562,12 @@ def clip_asset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> warehouse.ClipAssetResponse: - r"""Generates clips for downloading. The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. + r"""Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. .. code-block:: python diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py index df1b049eebc6..372e53e0beca 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py @@ -1142,11 +1142,12 @@ def clip_asset( ) -> Callable[[warehouse.ClipAssetRequest], warehouse.ClipAssetResponse]: r"""Return a callable for the clip asset method over gRPC. - Generates clips for downloading. The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. + Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. Returns: Callable[[~.ClipAssetRequest], diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py index d7eef7e7e00e..dccfe2920d8b 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py @@ -1172,11 +1172,12 @@ def clip_asset( ) -> Callable[[warehouse.ClipAssetRequest], Awaitable[warehouse.ClipAssetResponse]]: r"""Return a callable for the clip asset method over gRPC. - Generates clips for downloading. 
The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. + Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. Returns: Callable[[~.ClipAssetRequest], diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py index df36023a838c..08ebd9fab538 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py @@ -230,6 +230,7 @@ AnnotationValue, Asset, AssetSource, + BatchOperationStatus, BoolValue, CircleArea, ClipAssetRequest, @@ -582,6 +583,7 @@ "AnnotationValue", "Asset", "AssetSource", + "BatchOperationStatus", "BoolValue", "CircleArea", "ClipAssetRequest", diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py index 92451e6b9b0e..a236ffe33e3e 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py @@ -120,6 +120,7 @@ "DeleteAnnotationRequest", "ImportAssetsRequest", "ImportAssetsMetadata", + "BatchOperationStatus", "ImportAssetsResponse", "CreateSearchConfigRequest", "UpdateSearchConfigRequest", @@ -607,11 +608,11 @@ class State(proto.Enum): The default process state should never happen. IN_PROGRESS (1): - The feature is in progress. + The ml model analysis is in progress. SUCCEEDED (2): - The process is successfully done. + The ml model analysis is successfully done. FAILED (3): - The process failed. + The ml model analysis failed. """ STATE_UNSPECIFIED = 0 IN_PROGRESS = 1 @@ -1629,6 +1630,22 @@ class Index(proto.Message): Index of VIDEO_ON_DEMAND corpus can have at most one deployed index. Index of IMAGE corpus can have multiple deployed indexes. + satisfies_pzs (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Separation + enabled via an Org Policy constraint. It is set + to true when the index is a valid zone separated + index and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Isolation + enabled via an Org Policy constraint. It is set + to true when the index is a valid zone isolated + index and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class State(proto.Enum): @@ -1687,6 +1704,16 @@ class State(proto.Enum): number=8, message="DeployedIndexReference", ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=12, + optional=True, + ) class DeployedIndexReference(proto.Message): @@ -1709,6 +1736,9 @@ class Corpus(proto.Message): Within a corpus, media shares the same data schema. Search is also restricted within a single corpus. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Resource name of the corpus. Format: @@ -1729,6 +1759,22 @@ class Corpus(proto.Message): search_capability_setting (google.cloud.visionai_v1.types.SearchCapabilitySetting): Default search capability setting on corpus level. + satisfies_pzs (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Separation + enabled via an Org Policy constraint. It is set + to true when the corpus is a valid zone + separated corpus and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Isolation + enabled via an Org Policy constraint. It is set + to true when the corpus is a valid zone isolated + corpus and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class Type(proto.Enum): @@ -1778,6 +1824,16 @@ class Type(proto.Enum): number=8, message="SearchCapabilitySetting", ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=12, + optional=True, + ) class GetCorpusRequest(proto.Message): @@ -2874,6 +2930,11 @@ class ImportAssetsMetadata(proto.Message): Attributes: metadata (google.cloud.visionai_v1.types.OperationMetadata): The metadata of the operation. + status (google.cloud.visionai_v1.types.BatchOperationStatus): + The importing status including partial + failures, if the implementation can provide such + information during the progress of the + ImportAssets. """ metadata: common.OperationMetadata = proto.Field( @@ -2881,6 +2942,33 @@ class ImportAssetsMetadata(proto.Message): number=1, message=common.OperationMetadata, ) + status: "BatchOperationStatus" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchOperationStatus", + ) + + +class BatchOperationStatus(proto.Message): + r"""The batch operation status. + + Attributes: + success_count (int): + The count of assets (together with their + annotations if any) successfully ingested. + failure_count (int): + The count of assets failed to ingested; it + might be due to the annotation ingestion error. + """ + + success_count: int = proto.Field( + proto.INT32, + number=1, + ) + failure_count: int = proto.Field( + proto.INT32, + number=2, + ) class ImportAssetsResponse(proto.Message): @@ -3084,6 +3172,9 @@ class IndexEndpoint(proto.Message): r"""Message representing IndexEndpoint resource. Indexes are deployed into it. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Output only. Resource name of the IndexEndpoint. Format: @@ -3125,6 +3216,22 @@ class IndexEndpoint(proto.Message): Output only. Create timestamp. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Update timestamp. + satisfies_pzs (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Separation + enabled via an Org Policy constraint. It is set + to true when the index endpoint is a valid zone + separated index endpoint and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Isolation + enabled via an Org Policy constraint. 
It is set + to true when the index endpoint is a valid zone + isolated index endpoint and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class State(proto.Enum): @@ -3185,6 +3292,16 @@ class State(proto.Enum): number=8, message=timestamp_pb2.Timestamp, ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) class CreateIndexEndpointRequest(proto.Message): @@ -4639,11 +4756,13 @@ class SearchResultItem(proto.Message): segment (google.cloud.visionai_v1.types.Partition.TemporalPartition): The matched asset segment. relevance (float): - Relevance of this ``SearchResultItem`` to user search - request. Currently available only in Image Warehouse, and by - default represents cosine similarity. In the future can be - other measures such as "dot product" or "topicality" - requested in the search request. + Available to IMAGE corpus types. Relevance of this + ``SearchResultItem`` to user search query (text query or + image query). By default this represents cosine similarity + between the query and the retrieved media content. The value + is in the range of [-1, 1]. Note that search ranking is not + only decided by this relevance score, but also other factors + such as the match of annotations. requested_annotations (MutableSequence[google.cloud.visionai_v1.types.Annotation]): Search result annotations specified by result_annotation_keys in search request. diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py index 0b6e0f0a95cc..4cfebb38f721 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py @@ -5976,6 +5976,8 @@ def test_get_index(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, entire_corpus=True, ) response = client.get_index(request) @@ -5992,6 +5994,8 @@ def test_get_index(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.Index.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_empty_call(): @@ -6094,6 +6098,8 @@ async def test_get_index_empty_call_async(): display_name="display_name_value", description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index() @@ -6164,6 +6170,8 @@ async def test_get_index_async( display_name="display_name_value", description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index(request) @@ -6180,6 +6188,8 @@ async def test_get_index_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.Index.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -7656,6 +7666,8 @@ def test_get_corpus(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", 
type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_corpus(request) @@ -7671,6 +7683,8 @@ def test_get_corpus(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_corpus_empty_call(): @@ -7773,6 +7787,8 @@ async def test_get_corpus_empty_call_async(): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_corpus() @@ -7843,6 +7859,8 @@ async def test_get_corpus_async( display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_corpus(request) @@ -7859,6 +7877,8 @@ async def test_get_corpus_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -8030,6 +8050,8 @@ def test_update_corpus(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.update_corpus(request) @@ -8045,6 +8067,8 @@ def test_update_corpus(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_update_corpus_empty_call(): @@ -8143,6 +8167,8 @@ async def test_update_corpus_empty_call_async(): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.update_corpus() @@ -8215,6 +8241,8 @@ async def test_update_corpus_async( display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.update_corpus(request) @@ -8231,6 +8259,8 @@ async def test_update_corpus_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -20567,6 +20597,8 @@ def test_get_index_endpoint(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_index_endpoint(request) @@ -20582,6 +20614,8 @@ def test_get_index_endpoint(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.IndexEndpoint.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_endpoint_empty_call(): @@ -20694,6 +20728,8 @@ async def 
test_get_index_endpoint_empty_call_async(): display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index_endpoint() @@ -20768,6 +20804,8 @@ async def test_get_index_endpoint_async( display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index_endpoint(request) @@ -20784,6 +20822,8 @@ async def test_get_index_endpoint_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.IndexEndpoint.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -29750,6 +29790,8 @@ def test_create_index_rest(request_type): "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "deployed_indexes": [{"index_endpoint": "index_endpoint_value"}], + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -30141,6 +30183,8 @@ def test_update_index_rest(request_type): "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "deployed_indexes": [{"index_endpoint": "index_endpoint_value"}], + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -30530,6 +30574,8 @@ def test_get_index_rest(request_type): display_name="display_name_value", description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, entire_corpus=True, ) @@ -30550,6 +30596,8 @@ def test_get_index_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.Index.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_rest_use_cached_wrapped_rpc(): @@ -31503,6 +31551,8 @@ def test_create_corpus_rest(request_type): "default_ttl": {"seconds": 751, "nanos": 543}, "type_": 1, "search_capability_setting": {"search_capabilities": [{"type_": 1}]}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -31884,6 +31934,8 @@ def test_get_corpus_rest(request_type): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -31903,6 +31955,8 @@ def test_get_corpus_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_corpus_rest_use_cached_wrapped_rpc(): @@ -32183,6 +32237,8 @@ def test_update_corpus_rest(request_type): "default_ttl": {"seconds": 751, "nanos": 543}, "type_": 1, "search_capability_setting": {"search_capabilities": [{"type_": 1}]}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -32261,6 +32317,8 @@ def get_message_fields(field): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -32280,6 +32338,8 @@ def get_message_fields(field): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_update_corpus_rest_use_cached_wrapped_rpc(): @@ -41934,6 +41994,8 @@ def test_create_index_endpoint_rest(request_type): "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -42329,6 +42391,8 @@ def test_get_index_endpoint_rest(request_type): display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -42348,6 +42412,8 @@ def test_get_index_endpoint_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.IndexEndpoint.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_endpoint_rest_use_cached_wrapped_rpc(): @@ -43024,6 +43090,8 @@ def test_update_index_endpoint_rest(request_type): "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency From fa7a0925df5f2d65b11fdd3b0f38a0ba9d95db70 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Tue, 3 Sep 2024 09:43:47 -0700 Subject: [PATCH 044/108] chore: Update the root changelog (#13037) Update the root changelog Co-authored-by: ohmayr --- CHANGELOG.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2696012f56a1..9f4e47fb21b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,7 +30,7 @@ Changelogs - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.25](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.26](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -41,7 +41,7 @@ Changelogs - [google-cloud-bigquery-connection==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) - [google-cloud-bigquery-data-exchange==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) - [google-cloud-bigquery-datapolicies==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.15.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) - [google-cloud-bigquery-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) - [google-cloud-bigquery-migration==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) - [google-cloud-bigquery-reservation==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) @@ -92,7 +92,7 @@ Changelogs - [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) - [google-cloud-gdchardwaremanagement==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) - 
[google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) -- [google-cloud-gke-connect-gateway==0.8.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) +- [google-cloud-gke-connect-gateway==0.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) - [google-cloud-gke-hub==1.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) - [google-cloud-gke-multicloud==0.6.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) - [google-cloud-gsuiteaddons==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) @@ -132,7 +132,7 @@ Changelogs - [google-cloud-privilegedaccessmanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) - [google-cloud-public-ca==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) - [google-cloud-rapidmigrationassessment==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.21.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) - [google-cloud-recommendations-ai==0.10.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) - [google-cloud-recommender==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) - [google-cloud-redis-cluster==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) @@ -161,7 +161,7 @@ Changelogs - [google-cloud-talent==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) - [google-cloud-tasks==2.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) - [google-cloud-telcoautomation==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) -- [google-cloud-texttospeech==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) +- [google-cloud-texttospeech==2.17.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) - [google-cloud-tpu==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) - [google-cloud-trace==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) - [google-cloud-translate==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) @@ -187,7 +187,7 @@ Changelogs - [google-maps-routeoptimization==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) - 
[google-maps-routing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) - [google-maps-solar==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) -- [google-shopping-css==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) +- [google-shopping-css==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) - [google-shopping-merchant-accounts==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) - [google-shopping-merchant-conversions==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) - [google-shopping-merchant-datasources==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) From c1252cae2ec146b2b362c1f117d8628de69b6671 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:46:00 -0400 Subject: [PATCH 045/108] chore: release main (#13047) :robot: I have created a release *beep* *boop* ---
google-cloud-alloydb: 0.3.13 ## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.12...google-cloud-alloydb-v0.3.13) (2024-09-03) ### Features * support for enabling outbound public IP on an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) * support for getting maintenance schedule of a cluster ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) * support for getting outbound public IP addresses of an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) * support for setting maintenance update policy on a cluster ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9))
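As a rough illustration of the new AlloyDB fields above, here is a minimal, untested sketch. The field names (`maintenance_update_policy`, `enable_outbound_public_ip`, `outbound_public_ip_addresses`) come from the generated test bodies in this release; the project, location, and resource IDs are placeholders, and a real cluster needs more configuration (network, initial user, and so on) than shown here.

```python
# Hypothetical sketch only; resource names are placeholders.
from google.cloud import alloydb_v1

client = alloydb_v1.AlloyDBAdminClient()
parent = "projects/my-project/locations/us-central1"

# Maintenance update policy on a cluster (dicts are coerced to proto-plus messages).
cluster = {
    "maintenance_update_policy": {
        # day 1 == MONDAY in google.type.DayOfWeek; start_time is a TimeOfDay.
        "maintenance_windows": [{"day": 1, "start_time": {"hours": 2}}]
    }
}
client.create_cluster(parent=parent, cluster_id="my-cluster", cluster=cluster).result()

# Outbound public IP on an instance.
instance = {
    "instance_type": alloydb_v1.Instance.InstanceType.PRIMARY,
    "network_config": {"enable_outbound_public_ip": True},
}
created = client.create_instance(
    parent=client.cluster_path("my-project", "us-central1", "my-cluster"),
    instance_id="my-instance",
    instance=instance,
).result()

# New output-only field: the instance's outbound public IP addresses.
print(list(created.outbound_public_ip_addresses))
```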
google-cloud-batch: 0.17.27 ## [0.17.27](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.26...google-cloud-batch-v0.17.27) (2024-09-03) ### Features * **v1:** promote block_project_ssh_keys support to batch v1 API ([63a6de0](https://github.com/googleapis/google-cloud-python/commit/63a6de00b1c6e2b6289b4fa76468859c828cb363))
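A minimal sketch of the promoted field, assuming placeholder project, location, and job IDs; everything other than `block_project_ssh_keys` is ordinary `batch_v1` job boilerplate.

```python
# Hypothetical sketch only; project/location/job IDs are placeholders.
from google.cloud import batch_v1

job = batch_v1.Job(
    task_groups=[
        batch_v1.TaskGroup(
            task_spec=batch_v1.TaskSpec(
                runnables=[
                    batch_v1.Runnable(
                        script=batch_v1.Runnable.Script(text="echo hello")
                    )
                ]
            )
        )
    ],
    allocation_policy=batch_v1.AllocationPolicy(
        instances=[
            batch_v1.AllocationPolicy.InstancePolicyOrTemplate(
                policy=batch_v1.AllocationPolicy.InstancePolicy(
                    machine_type="e2-standard-4"
                ),
                # Newly promoted to v1: block project-level SSH keys on the job's VMs.
                block_project_ssh_keys=True,
            )
        ]
    ),
)

client = batch_v1.BatchServiceClient()
client.create_job(
    parent="projects/my-project/locations/us-central1",
    job_id="no-project-ssh-keys",
    job=job,
)
```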
google-cloud-container: 2.51.0 ## [2.51.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.50.0...google-cloud-container-v2.51.0) (2024-09-03) ### Features * add `EXTENDED` enum value for `ReleaseChannel.Channel` ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e)) * add ReleaseChannel EXTENDED value ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e))
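A short sketch of opting a cluster into the new channel value; the project and location are placeholders and the cluster spec is reduced to the minimum.

```python
# Hypothetical sketch only; project/location are placeholders.
from google.cloud import container_v1

cluster = container_v1.Cluster(
    name="extended-channel-cluster",
    initial_node_count=1,
    # New enum value in this release.
    release_channel=container_v1.ReleaseChannel(
        channel=container_v1.ReleaseChannel.Channel.EXTENDED
    ),
)

client = container_v1.ClusterManagerClient()
client.create_cluster(
    parent="projects/my-project/locations/us-central1",
    cluster=cluster,
)
```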
google-cloud-dataproc: 5.11.0 ## [5.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.2...google-cloud-dataproc-v5.11.0) (2024-09-03) ### Features * add optional parameters (tarball-access) in DiagnoseClusterRequest ([127e5c0](https://github.com/googleapis/google-cloud-python/commit/127e5c097b08042989c124ac4cdfb5147181855d))
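The sketch below shows roughly how the new option could be passed. The `tarball_access` field and the `TarballAccess.GOOGLE_CLOUD_SUPPORT` spelling are inferred from the release note ("tarball-access") rather than taken from this diff, and the project, region, and cluster names are placeholders.

```python
# Hypothetical sketch; the tarball_access field/enum spelling is an assumption
# inferred from the release note, and resource names are placeholders.
from google.cloud import dataproc_v1

client = dataproc_v1.ClusterControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

request = dataproc_v1.DiagnoseClusterRequest(
    project_id="my-project",
    region="us-central1",
    cluster_name="my-cluster",
    tarball_access=dataproc_v1.DiagnoseClusterRequest.TarballAccess.GOOGLE_CLOUD_SUPPORT,
)
operation = client.diagnose_cluster(request=request)

# The long-running operation resolves to a DiagnoseClusterResponse.
print(operation.result().output_uri)
```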
google-cloud-edgenetwork: 0.1.11 ## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.10...google-cloud-edgenetwork-v0.1.11) (2024-09-03) ### Documentation * swap comments on `BONDED` and `NON_BONDED` enums in `BondingType` ([308de6b](https://github.com/googleapis/google-cloud-python/commit/308de6b266e24a8996875736b66485d92f299401))
google-cloud-recaptcha-enterprise: 1.22.1 ## [1.22.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.22.0...google-cloud-recaptcha-enterprise-v1.22.1) (2024-09-03) ### Documentation * minor doc fixes ([7e40c8c](https://github.com/googleapis/google-cloud-python/commit/7e40c8c40039e0c01ef9a8bfea8804edfee48d70))
google-cloud-service-management: 1.9.0 ## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-service-management-v1.8.5...google-cloud-service-management-v1.9.0) (2024-09-03) ### Features * Support local binding for variables with keyword name collision ([c54700d](https://github.com/googleapis/google-cloud-python/commit/c54700d3e11e59eb5fae01fda25dbf3a9acbe382))
google-cloud-visionai: 0.1.3 ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-visionai-v0.1.2...google-cloud-visionai-v0.1.3) (2024-09-03) ### Features * add BatchOperationStatus to import metadata ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) * request client libraries for new languages ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) ### Documentation * A comment for enum value `FAILED` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) * A comment for enum value `IN_PROGRESS` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) * A comment for enum value `SUCCEEDED` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) * A comment for field `relevance` in message `.google.cloud.visionai.v1.SearchResultItem` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) * A comment for method `ClipAsset` in service `Warehouse` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6))
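To show where the new `BatchOperationStatus` surfaces, here is a minimal, untested sketch of an ImportAssets call. The corpus name and GCS URI are placeholders, and the `assets_gcs_uri` request field is an assumption not shown in this diff; only the metadata fields (`status.success_count`, `status.failure_count`) are confirmed above.

```python
# Hypothetical sketch; the corpus name and GCS URI are placeholders, and
# assets_gcs_uri is assumed to be the request's source field.
from google.cloud import visionai_v1

client = visionai_v1.WarehouseClient()

operation = client.import_assets(
    request=visionai_v1.ImportAssetsRequest(
        parent="projects/my-project/locations/us-central1/corpora/my-corpus",
        assets_gcs_uri="gs://my-bucket/assets.jsonl",
    )
)
operation.result()

# New in this release: the operation metadata carries per-batch success/failure counts.
metadata = operation.metadata  # visionai_v1.ImportAssetsMetadata
print(metadata.status.success_count, metadata.status.failure_count)
```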
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: ohmayr --- .release-please-manifest.json | 16 ++++++++-------- packages/google-cloud-alloydb/CHANGELOG.md | 10 ++++++++++ .../google/cloud/alloydb/gapic_version.py | 2 +- .../google/cloud/alloydb_v1/gapic_version.py | 2 +- .../google/cloud/alloydb_v1/types/resources.py | 2 +- .../cloud/alloydb_v1alpha/gapic_version.py | 2 +- .../cloud/alloydb_v1alpha/types/resources.py | 2 +- .../cloud/alloydb_v1beta/gapic_version.py | 2 +- .../cloud/alloydb_v1beta/types/resources.py | 2 +- ...nippet_metadata_google.cloud.alloydb.v1.json | 2 +- ...t_metadata_google.cloud.alloydb.v1alpha.json | 2 +- ...et_metadata_google.cloud.alloydb.v1beta.json | 2 +- packages/google-cloud-batch/CHANGELOG.md | 7 +++++++ .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.batch.v1.json | 2 +- ...pet_metadata_google.cloud.batch.v1alpha.json | 2 +- packages/google-cloud-container/CHANGELOG.md | 8 ++++++++ .../google/cloud/container/gapic_version.py | 2 +- .../google/cloud/container_v1/gapic_version.py | 2 +- .../cloud/container_v1beta1/gapic_version.py | 2 +- .../snippet_metadata_google.container.v1.json | 2 +- ...ippet_metadata_google.container.v1beta1.json | 2 +- packages/google-cloud-dataproc/CHANGELOG.md | 7 +++++++ .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/gapic_version.py | 2 +- ...ippet_metadata_google.cloud.dataproc.v1.json | 2 +- packages/google-cloud-edgenetwork/CHANGELOG.md | 7 +++++++ .../google/cloud/edgenetwork/gapic_version.py | 2 +- .../cloud/edgenetwork_v1/gapic_version.py | 2 +- ...et_metadata_google.cloud.edgenetwork.v1.json | 2 +- .../CHANGELOG.md | 7 +++++++ .../cloud/recaptchaenterprise/gapic_version.py | 2 +- .../recaptchaenterprise_v1/gapic_version.py | 2 +- ...ata_google.cloud.recaptchaenterprise.v1.json | 2 +- .../CHANGELOG.md | 7 +++++++ .../cloud/servicemanagement/gapic_version.py | 2 +- .../cloud/servicemanagement_v1/gapic_version.py | 2 +- ...etadata_google.api.servicemanagement.v1.json | 2 +- packages/google-cloud-visionai/CHANGELOG.md | 17 +++++++++++++++++ .../google/cloud/visionai/gapic_version.py | 2 +- .../google/cloud/visionai_v1/gapic_version.py | 2 +- .../cloud/visionai_v1alpha1/gapic_version.py | 2 +- .../cloud/visionai_v1alpha1/types/platform.py | 3 ++- ...ippet_metadata_google.cloud.visionai.v1.json | 2 +- ...metadata_google.cloud.visionai.v1alpha1.json | 2 +- 47 files changed, 117 insertions(+), 46 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 0d0833f003e9..be5aedaf8867 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -11,7 +11,7 @@ "packages/google-area120-tables": "0.11.11", "packages/google-cloud-access-approval": "1.13.5", "packages/google-cloud-advisorynotifications": "0.3.10", - "packages/google-cloud-alloydb": "0.3.12", + "packages/google-cloud-alloydb": "0.3.13", "packages/google-cloud-alloydb-connectors": "0.1.6", "packages/google-cloud-api-gateway": "1.9.5", "packages/google-cloud-api-keys": "0.5.11", @@ -27,7 +27,7 @@ "packages/google-cloud-automl": "2.13.5", "packages/google-cloud-backupdr": "0.1.3", 
"packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.26", + "packages/google-cloud-batch": "0.17.27", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", "packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -56,7 +56,7 @@ "packages/google-cloud-confidentialcomputing": "0.4.11", "packages/google-cloud-config": "0.1.11", "packages/google-cloud-contact-center-insights": "1.17.5", - "packages/google-cloud-container": "2.50.0", + "packages/google-cloud-container": "2.51.0", "packages/google-cloud-containeranalysis": "2.14.5", "packages/google-cloud-contentwarehouse": "0.7.9", "packages/google-cloud-data-fusion": "1.10.5", @@ -67,7 +67,7 @@ "packages/google-cloud-dataform": "0.5.11", "packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.10.2", + "packages/google-cloud-dataproc": "5.11.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", @@ -80,7 +80,7 @@ "packages/google-cloud-documentai": "2.31.0", "packages/google-cloud-domains": "1.7.5", "packages/google-cloud-edgecontainer": "0.5.11", - "packages/google-cloud-edgenetwork": "0.1.10", + "packages/google-cloud-edgenetwork": "0.1.11", "packages/google-cloud-enterpriseknowledgegraph": "0.3.11", "packages/google-cloud-essential-contacts": "1.7.5", "packages/google-cloud-eventarc": "1.11.5", @@ -129,7 +129,7 @@ "packages/google-cloud-privilegedaccessmanager": "0.1.1", "packages/google-cloud-public-ca": "0.3.12", "packages/google-cloud-rapidmigrationassessment": "0.1.9", - "packages/google-cloud-recaptcha-enterprise": "1.22.0", + "packages/google-cloud-recaptcha-enterprise": "1.22.1", "packages/google-cloud-recommendations-ai": "0.10.12", "packages/google-cloud-recommender": "2.15.5", "packages/google-cloud-redis": "2.15.5", @@ -145,7 +145,7 @@ "packages/google-cloud-securitycentermanagement": "0.1.14", "packages/google-cloud-service-control": "1.12.3", "packages/google-cloud-service-directory": "1.11.6", - "packages/google-cloud-service-management": "1.8.5", + "packages/google-cloud-service-management": "1.9.0", "packages/google-cloud-service-usage": "1.10.5", "packages/google-cloud-servicehealth": "0.1.6", "packages/google-cloud-shell": "1.9.5", @@ -167,7 +167,7 @@ "packages/google-cloud-video-transcoder": "1.12.5", "packages/google-cloud-videointelligence": "2.13.5", "packages/google-cloud-vision": "3.7.4", - "packages/google-cloud-visionai": "0.1.2", + "packages/google-cloud-visionai": "0.1.3", "packages/google-cloud-vm-migration": "1.8.5", "packages/google-cloud-vmwareengine": "1.5.0", "packages/google-cloud-vpc-access": "1.10.5", diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md index c2be3ab22ace..7545a07df323 100644 --- a/packages/google-cloud-alloydb/CHANGELOG.md +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.12...google-cloud-alloydb-v0.3.13) (2024-09-03) + + +### Features + +* support for enabling outbound public IP on an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for getting maintenance schedule of a cluster 
([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for getting outbound public IP addresses of an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for setting maintenance update policy on a cluster ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) + ## [0.3.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.11...google-cloud-alloydb-v0.3.12) (2024-07-30) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py index 235d25432420..eab98a4fe3ff 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py @@ -1283,7 +1283,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py index e4af7a8c95de..22d401ff11cc 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -1344,7 +1344,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. 
The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index 9957f9c6b926..9cf140f311a4 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -1397,7 +1397,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index 1cb197a5b3b4..70a94ca97046 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.1.0" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index d70e655d280d..1ed9939262fa 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.1.0" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index dc19bd4f2a54..8aac8040c177 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.1.0" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index ef34a533cfa8..addee943a3ea 100644 --- 
a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.17.27](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.26...google-cloud-batch-v0.17.27) (2024-09-03) + + +### Features + +* **v1:** promote block_project_ssh_keys support to batch v1 API ([63a6de0](https://github.com/googleapis/google-cloud-python/commit/63a6de00b1c6e2b6289b4fa76468859c828cb363)) + ## [0.17.26](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.25...google-cloud-batch-v0.17.26) (2024-08-20) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 558c8aab67c5..7de8a6a6838d 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.27" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 558c8aab67c5..7de8a6a6838d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.27" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 558c8aab67c5..7de8a6a6838d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
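To make the block_project_ssh_keys promotion in the google-cloud-batch changelog above concrete, here is a minimal, illustrative sketch of building a Batch v1 job whose VMs opt out of project-wide SSH keys. The field placement on AllocationPolicy.InstancePolicyOrTemplate is assumed from the pre-existing v1alpha surface rather than quoted from this patch.

from google.cloud import batch_v1

# Sketch only: block_project_ssh_keys is assumed to sit on
# InstancePolicyOrTemplate, mirroring the v1alpha field this change promotes.
allocation_policy = batch_v1.AllocationPolicy(
    instances=[
        batch_v1.AllocationPolicy.InstancePolicyOrTemplate(
            block_project_ssh_keys=True,
            policy=batch_v1.AllocationPolicy.InstancePolicy(
                machine_type="e2-standard-4",
            ),
        )
    ]
)

# The policy would then be attached to a Job before calling
# BatchServiceClient.create_job; no request is sent here.
job = batch_v1.Job(allocation_policy=allocation_policy)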
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.27" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index e2df1067e4dd..1a9ad7a0b658 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.27" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 7f67670b100c..9c3638c4d767 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.27" }, "snippets": [ { diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md index d939cc42c90a..c39b8c48144d 100644 --- a/packages/google-cloud-container/CHANGELOG.md +++ b/packages/google-cloud-container/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-container/#history +## [2.51.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.50.0...google-cloud-container-v2.51.0) (2024-09-03) + + +### Features + +* add `EXTENDED` enum value for `ReleaseChannel.Channel` ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e)) +* add ReleaseChannel EXTENDED value ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e)) + ## [2.50.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.49.0...google-cloud-container-v2.50.0) (2024-07-30) diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 558c8aab67c5..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 558c8aab67c5..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
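The EXTENDED value added to ReleaseChannel.Channel in the google-cloud-container changelog above can be exercised with plain message construction; the cluster-update wiring below is an illustrative sketch (it assumes google-cloud-container >= 2.51.0), not code taken from this patch.

from google.cloud import container_v1

# Sketch only: request the newly added EXTENDED release channel for a cluster.
release_channel = container_v1.ReleaseChannel(
    channel=container_v1.ReleaseChannel.Channel.EXTENDED,
)

# A ClusterUpdate carrying the desired channel; in a real call it would be
# passed to ClusterManagerClient.update_cluster.
update = container_v1.ClusterUpdate(desired_release_channel=release_channel)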
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 558c8aab67c5..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index 2a343a6434fa..b317d472f3e2 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "0.1.0" + "version": "2.51.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index afdb562b267e..f6b08f446330 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "0.1.0" + "version": "2.51.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index d912ceecb20e..00b658392846 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.2...google-cloud-dataproc-v5.11.0) (2024-09-03) + + +### Features + +* add optional parameters (tarball-access) in DiagnoseClusterRequest ([127e5c0](https://github.com/googleapis/google-cloud-python/commit/127e5c097b08042989c124ac4cdfb5147181855d)) + ## [5.10.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.1...google-cloud-dataproc-v5.10.2) (2024-07-30) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 558c8aab67c5..0f412e925d59 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
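For the google-cloud-dataproc entry above, a diagnose request using the new optional tarball-access parameter might look roughly like the following; the field and enum names are assumed from the public Dataproc API surface and are not shown in this hunk.

from google.cloud import dataproc_v1

# Sketch only: grant Google Cloud support read access to the diagnostic tarball.
request = dataproc_v1.DiagnoseClusterRequest(
    project_id="my-project",      # placeholder identifiers
    region="us-central1",
    cluster_name="my-cluster",
    tarball_access=dataproc_v1.DiagnoseClusterRequest.TarballAccess.GOOGLE_CLOUD_SUPPORT,
)

# The request would be passed to ClusterControllerClient.diagnose_cluster,
# which returns a long-running operation; no call is made here.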
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 558c8aab67c5..0f412e925d59 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c5f4e003db04..c1b4b338fe39 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "0.1.0" + "version": "5.11.0" }, "snippets": [ { diff --git a/packages/google-cloud-edgenetwork/CHANGELOG.md b/packages/google-cloud-edgenetwork/CHANGELOG.md index 87be46e4a5c2..07373dfdf459 100644 --- a/packages/google-cloud-edgenetwork/CHANGELOG.md +++ b/packages/google-cloud-edgenetwork/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.10...google-cloud-edgenetwork-v0.1.11) (2024-09-03) + + +### Documentation + +* swap comments on `BONDED` and `NON_BONDED` enums in `BondingType` ([308de6b](https://github.com/googleapis/google-cloud-python/commit/308de6b266e24a8996875736b66485d92f299401)) + ## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.9...google-cloud-edgenetwork-v0.1.10) (2024-07-30) diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json index 04d64b8b1330..4e8d32cac42d 100644 --- a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json +++ b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgenetwork", - "version": "0.1.0" + "version": "0.1.11" }, "snippets": [ { diff --git a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md index 0fb0e97fd517..5503697c10fa 100644 --- a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md +++ b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.22.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.22.0...google-cloud-recaptcha-enterprise-v1.22.1) (2024-09-03) + + +### Documentation + +* minor doc fixes ([7e40c8c](https://github.com/googleapis/google-cloud-python/commit/7e40c8c40039e0c01ef9a8bfea8804edfee48d70)) + ## [1.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.21.2...google-cloud-recaptcha-enterprise-v1.22.0) (2024-08-22) diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py index 558c8aab67c5..bcb1d2f54b4a 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.22.1" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py index 558c8aab67c5..bcb1d2f54b4a 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.22.1" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index b2ad3f987f98..225f39c961dd 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recaptcha-enterprise", - "version": "0.1.0" + "version": "1.22.1" }, "snippets": [ { diff --git a/packages/google-cloud-service-management/CHANGELOG.md b/packages/google-cloud-service-management/CHANGELOG.md index c62d63a5255d..40b0c2033509 100644 --- a/packages/google-cloud-service-management/CHANGELOG.md +++ b/packages/google-cloud-service-management/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-service-management-v1.8.5...google-cloud-service-management-v1.9.0) (2024-09-03) + + +### Features + +* Support local binding for variables with keyword name collision ([c54700d](https://github.com/googleapis/google-cloud-python/commit/c54700d3e11e59eb5fae01fda25dbf3a9acbe382)) + ## [1.8.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-service-management-v1.8.4...google-cloud-service-management-v1.8.5) (2024-07-30) diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py b/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py index 558c8aab67c5..1c08bcbd1569 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py index 558c8aab67c5..1c08bcbd1569 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json b/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json index 8b514f320127..f108a0c7b5e1 100644 --- a/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json +++ b/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-management", - "version": "0.1.0" + "version": "1.9.0" }, "snippets": [ { diff --git a/packages/google-cloud-visionai/CHANGELOG.md b/packages/google-cloud-visionai/CHANGELOG.md index 55e12017ac55..ef410baf042c 100644 --- a/packages/google-cloud-visionai/CHANGELOG.md +++ b/packages/google-cloud-visionai/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-visionai-v0.1.2...google-cloud-visionai-v0.1.3) (2024-09-03) + + +### Features + +* add BatchOperationStatus to import metadata ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* request client libraries for new languages ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) + + +### Documentation + +* A comment for enum value `FAILED` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for enum value `IN_PROGRESS` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for enum value `SUCCEEDED` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for field `relevance` in message `.google.cloud.visionai.v1.SearchResultItem` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for method `ClipAsset` in service `Warehouse` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-visionai-v0.1.1...google-cloud-visionai-v0.1.2) (2024-07-30) diff --git a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py index fd8a8b002e97..1710e7124467 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py @@ -3253,7 +3253,8 @@ class VertexCustomConfig(proto.Message): 'instanceId': STRING; 'node': STRING; 'processor': STRING; - } + + } """ max_prediction_fps: int = proto.Field( diff --git a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json index db06e43868dd..37adaa87e6b4 100644 --- a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json +++ b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-visionai", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { diff --git a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json index f7d1e3f46ede..26bf9524f8af 100644 --- a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json +++ b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-visionai", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { From 51947fb5913a3859ee45cd66a0dbad51a84cebe9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 08:07:56 -0400 Subject: [PATCH 046/108] feat: [google-cloud-discoveryengine] promote search tuning service to v1 (#13049) - [ ] Regenerate this pull request now. 
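Before the generated changes below, a rough idea of how the promoted SearchTuningService could be called from the v1 client: the request shape (in particular the data_store field) is assumed from the pre-existing v1beta service, and the resource name is a placeholder.

from google.cloud import discoveryengine_v1


def sample_list_custom_models():
    # Illustrative only; creating the client requires application default credentials.
    client = discoveryengine_v1.SearchTuningServiceClient()

    # Placeholder data store resource name.
    request = discoveryengine_v1.ListCustomModelsRequest(
        data_store="projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store",
    )

    # List the custom tuning models attached to the data store.
    response = client.list_custom_models(request=request)
    print(response)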
BEGIN_COMMIT_OVERRIDE feat(v1): promote search tuning service to v1 feat(v1): promote user event purge to v1 feat(v1): return structured document info in answers feat(v1): return index status in document feat(v1): support batch documents purge with GCS input feat(v1): support batch get documents metadata by uri patterns feat(v1): return joined status in user event docs(v1): keep the API doc up-to-date with recent changes feat(v1beta): support natural language understanding in search feat(v1beta): allow set relevance threshold on search feat(v1beta): support one box search feat(v1beta): return structured document info in answers feat(v1beta): return index status in document feat(v1beta): support batch documents purge with GCS input feat(v1beta): support batch get documents metadata by uri patterns feat(v1beta): return joined status in user event docs(v1beta): keep the API doc up-to-date with recent changes feat(v1alpha): return index status in document feat(v1alpha): support creating workspace search data stores feat(v1alpha): support batch get documents metadata by uri patterns feat(v1alpha): return query segment in NL query understanding feat(v1alpha): return joined status in user event docs(v1alpha): keep the API doc up-to-date with recent changes END_COMMIT_OVERRIDE PiperOrigin-RevId: 670771871 Source-Link: https://github.com/googleapis/googleapis/commit/5314818275923044fb5af690c5ad85c3428d0842 Source-Link: https://github.com/googleapis/googleapis-gen/commit/85cebbbe5a100fa106d93dac34dacbf19a3a6a48 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpc2NvdmVyeWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiODVjZWJiYmU1YTEwMGZhMTA2ZDkzZGFjMzRkYWNiZjE5YTNhNmE0OCJ9 BEGIN_NESTED_COMMIT feat: [google-cloud-discoveryengine] support natural language understanding in search feat: allow set relevance threshold on search feat: support one box search feat: return structured document info in answers feat: return index status in document feat: support batch documents purge with GCS input feat: support batch get documents metadata by uri patterns feat: return joined status in user event docs: keep the API doc up-to-date with recent changes PiperOrigin-RevId: 670771759 Source-Link: https://github.com/googleapis/googleapis/commit/6cb0a970d16964a22626a61d9f7a1cc879f0fc7a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ad7bcde915dc55058df32077a44de7a5cf5cb1fc Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpc2NvdmVyeWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiYWQ3YmNkZTkxNWRjNTUwNThkZjMyMDc3YTQ0ZGU3YTVjZjVjYjFmYyJ9 END_NESTED_COMMIT BEGIN_NESTED_COMMIT feat: [google-cloud-discoveryengine] return index status in document feat: support creating workspace search data stores feat: support batch get documents metadata by uri patterns feat: return query segment in NL query understanding feat: return joined status in user event docs: keep the API doc up-to-date with recent changes PiperOrigin-RevId: 670770678 Source-Link: https://github.com/googleapis/googleapis/commit/ce9d18865ea37d50d772665c36949723afe91ddf Source-Link: https://github.com/googleapis/googleapis-gen/commit/a0866c4cd0b20e0ab502590b5228c197e42dc17e Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpc2NvdmVyeWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiYTA4NjZjNGNkMGIyMGUwYWI1MDI1OTBiNTIyOGMxOTdlNDJkYzE3ZSJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot --- .../search_tuning_service.rst | 6 + .../docs/discoveryengine_v1/services_.rst | 1 + .../google/cloud/discoveryengine/__init__.py | 6 + .../cloud/discoveryengine_v1/__init__.py | 32 +
.../discoveryengine_v1/gapic_metadata.json | 79 + .../completion_service/transports/rest.py | 16 + .../services/control_service/async_client.py | 15 +- .../services/control_service/client.py | 15 +- .../control_service/transports/rest.py | 29 +- .../transports/rest.py | 8 + .../data_store_service/transports/rest.py | 16 + .../services/document_service/async_client.py | 116 + .../services/document_service/client.py | 115 + .../document_service/transports/base.py | 17 + .../document_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../document_service/transports/rest.py | 159 + .../engine_service/transports/rest.py | 16 + .../transports/rest.py | 8 + .../project_service/transports/rest.py | 16 + .../services/rank_service/transports/rest.py | 8 + .../recommendation_service/transports/rest.py | 8 + .../schema_service/transports/rest.py | 16 + .../search_service/transports/rest.py | 8 + .../search_tuning_service/__init__.py | 22 + .../search_tuning_service/async_client.py | 654 +++ .../services/search_tuning_service/client.py | 1097 +++++ .../transports/__init__.py | 38 + .../search_tuning_service/transports/base.py | 212 + .../search_tuning_service/transports/grpc.py | 372 ++ .../transports/grpc_asyncio.py | 390 ++ .../search_tuning_service/transports/rest.py | 1034 +++++ .../transports/rest.py | 16 + .../user_event_service/async_client.py | 105 + .../services/user_event_service/client.py | 103 + .../user_event_service/transports/base.py | 15 + .../user_event_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 38 + .../user_event_service/transports/rest.py | 157 + .../discoveryengine_v1/types/__init__.py | 26 + .../cloud/discoveryengine_v1/types/answer.py | 88 +- .../cloud/discoveryengine_v1/types/control.py | 13 +- .../types/conversational_search_service.py | 17 +- .../types/custom_tuning_model.py | 125 + .../types/data_store_service.py | 15 + .../discoveryengine_v1/types/document.py | 39 + .../types/document_processing_config.py | 2 + .../types/document_service.py | 172 + .../types/grounded_generation_service.py | 3 +- .../discoveryengine_v1/types/import_config.py | 41 +- .../discoveryengine_v1/types/purge_config.py | 198 + .../types/recommendation_service.py | 3 +- .../types/search_service.py | 63 +- .../types/search_tuning_service.py | 271 ++ .../discoveryengine_v1/types/user_event.py | 7 + .../cloud/discoveryengine_v1alpha/__init__.py | 7 +- .../gapic_metadata.json | 15 + .../acl_config_service/async_client.py | 2 +- .../services/acl_config_service/client.py | 2 +- .../acl_config_service/transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- .../services/document_service/async_client.py | 112 + .../services/document_service/client.py | 111 + .../document_service/transports/base.py | 17 + .../document_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../document_service/transports/rest.py | 143 + .../discoveryengine_v1alpha/types/__init__.py | 7 +- .../discoveryengine_v1alpha/types/answer.py | 7 + .../discoveryengine_v1alpha/types/control.py | 2 +- .../types/custom_tuning_model.py | 4 + .../types/data_store.py | 70 + .../discoveryengine_v1alpha/types/document.py | 39 + .../types/document_service.py | 162 + .../types/grounded_generation_service.py | 3 +- .../types/import_config.py | 32 +- .../types/search_service.py | 31 +- .../types/user_event.py | 7 + .../cloud/discoveryengine_v1beta/__init__.py | 6 + .../gapic_metadata.json | 15 + .../services/document_service/async_client.py | 116 + 
.../services/document_service/client.py | 115 + .../document_service/transports/base.py | 17 + .../document_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../document_service/transports/rest.py | 143 + .../discoveryengine_v1beta/types/__init__.py | 6 + .../discoveryengine_v1beta/types/answer.py | 79 +- .../discoveryengine_v1beta/types/control.py | 2 +- .../types/conversational_search_service.py | 3 + .../types/custom_tuning_model.py | 4 + .../discoveryengine_v1beta/types/document.py | 39 + .../types/document_processing_config.py | 2 + .../types/document_service.py | 173 + .../types/grounded_generation_service.py | 3 +- .../types/import_config.py | 41 +- .../types/purge_config.py | 84 + .../types/search_service.py | 157 +- .../types/user_event.py | 7 + ...vice_batch_get_documents_metadata_async.py | 52 + ...rvice_batch_get_documents_metadata_sync.py | 52 + ..._document_service_purge_documents_async.py | 4 + ...d_document_service_purge_documents_sync.py | 4 + ...tuning_service_list_custom_models_async.py | 52 + ..._tuning_service_list_custom_models_sync.py | 52 + ...tuning_service_train_custom_model_async.py | 56 + ..._tuning_service_train_custom_model_sync.py | 56 + ...r_event_service_purge_user_events_async.py | 57 + ...er_event_service_purge_user_events_sync.py | 57 + ...vice_batch_get_documents_metadata_async.py | 52 + ...rvice_batch_get_documents_metadata_sync.py | 52 + ...vice_batch_get_documents_metadata_async.py | 52 + ...rvice_batch_get_documents_metadata_sync.py | 52 + ..._document_service_purge_documents_async.py | 4 + ...d_document_service_purge_documents_sync.py | 4 + ...adata_google.cloud.discoveryengine.v1.json | 790 +++- ..._google.cloud.discoveryengine.v1alpha.json | 161 + ...a_google.cloud.discoveryengine.v1beta.json | 189 +- .../fixup_discoveryengine_v1_keywords.py | 8 +- .../fixup_discoveryengine_v1alpha_keywords.py | 1 + .../fixup_discoveryengine_v1beta_keywords.py | 5 +- .../test_data_store_service.py | 2 + .../test_document_service.py | 2041 ++++++--- .../test_search_tuning_service.py | 3772 +++++++++++++++++ .../test_user_event_service.py | 568 +++ .../test_data_store_service.py | 8 + .../test_document_service.py | 2131 +++++++--- .../test_user_event_service.py | 1 + .../test_document_service.py | 2041 ++++++--- .../test_evaluation_service.py | 2 + .../test_serving_config_service.py | 1 + .../test_user_event_service.py | 1 + 132 files changed, 18228 insertions(+), 2219 deletions(-) create mode 100644 packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py create 
mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py create mode 100644 packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst new file mode 100644 index 000000000000..02b0fe7ef583 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst @@ -0,0 +1,6 @@ +SearchTuningService +------------------------------------- + +.. 
automodule:: google.cloud.discoveryengine_v1.services.search_tuning_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst index 6e0bbcda2e72..411ae24c92cc 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst @@ -15,5 +15,6 @@ Services for Google Cloud Discoveryengine v1 API recommendation_service schema_service search_service + search_tuning_service site_search_engine_service user_event_service diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py index 42959becdbfc..48c89d24d6f4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py @@ -208,6 +208,8 @@ DocumentProcessingConfig, ) from google.cloud.discoveryengine_v1beta.types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -289,6 +291,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, @@ -501,6 +504,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -569,6 +574,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py index e06104fe1669..261c72ef4b22 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py @@ -45,6 +45,10 @@ ) from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient from .services.search_service import SearchServiceAsyncClient, SearchServiceClient +from .services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, +) from .services.site_search_engine_service import ( SiteSearchEngineServiceAsyncClient, SiteSearchEngineServiceClient, @@ -102,6 +106,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .types.custom_tuning_model import CustomTuningModel from .types.data_store import DataStore from .types.data_store_service import ( CreateDataStoreMetadata, @@ -116,6 +121,8 @@ from .types.document import Document from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -173,9 +180,13 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, 
PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, + PurgeUserEventsMetadata, + PurgeUserEventsRequest, + PurgeUserEventsResponse, ) from .types.rank_service import RankingRecord, RankRequest, RankResponse from .types.recommendation_service import RecommendRequest, RecommendResponse @@ -192,6 +203,13 @@ UpdateSchemaRequest, ) from .types.search_service import SearchRequest, SearchResponse +from .types.search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) from .types.session import Query, Session from .types.site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .types.site_search_engine_service import ( @@ -248,6 +266,7 @@ "RecommendationServiceAsyncClient", "SchemaServiceAsyncClient", "SearchServiceAsyncClient", + "SearchTuningServiceAsyncClient", "SiteSearchEngineServiceAsyncClient", "UserEventServiceAsyncClient", "AlloyDbSource", @@ -257,6 +276,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -296,6 +317,7 @@ "CreateTargetSiteMetadata", "CreateTargetSiteRequest", "CustomAttribute", + "CustomTuningModel", "DataStore", "DataStoreServiceClient", "DeleteControlRequest", @@ -359,6 +381,8 @@ "ListControlsResponse", "ListConversationsRequest", "ListConversationsResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "ListDataStoresRequest", "ListDataStoresResponse", "ListDocumentsRequest", @@ -384,9 +408,13 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "PurgeUserEventsMetadata", + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", "Query", "RankRequest", "RankResponse", @@ -407,6 +435,7 @@ "SearchResponse", "SearchServiceClient", "SearchTier", + "SearchTuningServiceClient", "SearchUseCase", "Session", "SiteSearchEngine", @@ -417,6 +446,9 @@ "SuggestionDenyListEntry", "TargetSite", "TextInput", + "TrainCustomModelMetadata", + "TrainCustomModelRequest", + "TrainCustomModelResponse", "TransactionInfo", "UpdateControlRequest", "UpdateConversationRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json index 855e95c5aceb..219241f131af 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json @@ -506,6 +506,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -546,6 +551,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -586,6 +596,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, 
"CreateDocument": { "methods": [ "create_document" @@ -983,6 +998,55 @@ } } }, + "SearchTuningService": { + "clients": { + "grpc": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SearchTuningServiceAsyncClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "rest": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + } + } + }, "SiteSearchEngineService": { "clients": { "grpc": { @@ -1197,6 +1261,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" @@ -1217,6 +1286,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" @@ -1237,6 +1311,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py index 836224789b61..d47335b10033 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py @@ -490,6 +490,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -552,6 +556,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1302,6 +1310,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1424,6 +1436,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py index 35b700126b56..377edbf159d7 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py @@ -358,8 +358,9 @@ async def sample_create_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -590,8 +591,9 @@ async def sample_update_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -702,8 +704,9 @@ async def sample_get_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py index 2a900c13463d..19ac1f8332c8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py @@ -803,8 +803,9 @@ def sample_create_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -1029,8 +1030,9 @@ def sample_update_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -1138,8 +1140,9 @@ def sample_get_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. 
""" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py index dc8bd898b465..7d8ea550e2a5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py @@ -414,9 +414,10 @@ def __call__( Returns: ~.gcd_control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. """ @@ -601,9 +602,10 @@ def __call__( Returns: ~.control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. """ @@ -791,9 +793,10 @@ def __call__( Returns: ~.gcd_control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. 
""" @@ -1059,6 +1062,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1181,6 +1188,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py index 84b6bc30c1a6..f1211380fe64 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py @@ -2213,6 +2213,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -2335,6 +2339,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py index 65051ae919e2..caae22741d06 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py @@ -482,6 +482,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -544,6 +548,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1266,6 +1274,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1388,6 +1400,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + 
"method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py index 9cf6c715c029..2a549bd41562 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py @@ -1065,7 +1065,11 @@ async def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1142,6 +1146,118 @@ async def sample_purge_documents(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py index 0de64afac5e6..7cd09e1391e9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py @@ -1519,7 +1519,11 @@ def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1594,6 +1598,117 @@ def sample_purge_documents(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
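Besides the request-object form shown in the generated sample, the new method also accepts the flattened ``parent`` argument handled above; passing both raises ``ValueError``. A minimal sketch with a placeholder branch name:

.. code-block:: python

    # Sketch only: the branch resource name is a placeholder and the call
    # needs valid credentials, as with the generated samples.
    from google.cloud import discoveryengine_v1

    client = discoveryengine_v1.DocumentServiceClient()
    response = client.batch_get_documents_metadata(
        parent=(
            "projects/my-project/locations/global/dataStores/"
            "my-data-store/branches/default_branch"  # placeholder
        ),
    )
    print(response)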
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py index 4df6d3f36550..5c05090f3e92 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py @@ -181,6 +181,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -263,6 +268,18 @@ def purge_documents( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py index 7b5a3256af7f..688476d1b16f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py @@ -469,6 +469,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py index 4bd82f9fdb85..7667dceb4ddb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py @@ -484,6 +484,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -531,6 +562,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py index 1418d3d2036b..51788a4c0f13 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. 
code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +145,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -532,6 +565,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -594,6 +631,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -621,6 +662,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
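The hooks above follow the interceptor pattern described in the ``DocumentServiceRestInterceptor`` docstring. A sketch of wiring a logging interceptor into the REST transport, assuming the usual GAPIC constructor that accepts an ``interceptor`` argument:

.. code-block:: python

    # Sketch only: assumes DocumentServiceRestTransport(interceptor=...) as in
    # other generated REST transports; adjust if the actual constructor differs.
    import logging

    from google.cloud import discoveryengine_v1
    from google.cloud.discoveryengine_v1.services.document_service.transports.rest import (
        DocumentServiceRestInterceptor,
        DocumentServiceRestTransport,
    )


    class LoggingInterceptor(DocumentServiceRestInterceptor):
        def pre_batch_get_documents_metadata(self, request, metadata):
            logging.info("BatchGetDocumentsMetadata request: %s", request)
            return request, metadata

        def post_batch_get_documents_metadata(self, response):
            logging.info("BatchGetDocumentsMetadata response received")
            return response


    transport = DocumentServiceRestTransport(interceptor=LoggingInterceptor())
    client = discoveryengine_v1.DocumentServiceClient(transport=transport)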
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1294,6 +1434,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, @@ -1508,6 +1659,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1630,6 +1785,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py index 9f8887074424..5cdbcf388f23 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py @@ -477,6 +477,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -539,6 +543,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1224,6 +1232,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1346,6 +1358,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py index ee8a9194089c..ef84072de1d1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py @@ -533,6 +533,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": 
"/v1/{name=projects/*/locations/*/operations/*}", @@ -655,6 +659,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py index b8a0f24db978..86fe2e880195 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py @@ -355,6 +355,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -417,6 +421,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -705,6 +713,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -827,6 +839,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py index 8796a94833df..c4725bfa7dfa 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py @@ -524,6 +524,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -646,6 +650,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py index 8b3979d914df..dac81936a0ee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py @@ -538,6 +538,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -660,6 +664,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py index 214d34eaee49..56d8cdb52691 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py @@ -477,6 +477,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -539,6 +543,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1243,6 +1251,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1365,6 +1377,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py index c8165a22227b..6d55c0b7011d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py @@ -536,6 +536,10 @@ def __call__( "method": "get", "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -658,6 +662,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py new file mode 100644 index 000000000000..71fba7ca5f3f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import SearchTuningServiceAsyncClient +from .client import SearchTuningServiceClient + +__all__ = ( + "SearchTuningServiceClient", + "SearchTuningServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py new file mode 100644 index 000000000000..af413b4bedf6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py @@ -0,0 +1,654 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + search_tuning_service, +) + +from .client import SearchTuningServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .transports.grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport + + +class SearchTuningServiceAsyncClient: + """Service for search tuning.""" + + _client: SearchTuningServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = SearchTuningServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SearchTuningServiceClient._DEFAULT_UNIVERSE + + custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.custom_tuning_model_path + ) + parse_custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.parse_custom_tuning_model_path + ) + data_store_path = staticmethod(SearchTuningServiceClient.data_store_path) + parse_data_store_path = staticmethod( + SearchTuningServiceClient.parse_data_store_path + ) + common_billing_account_path = staticmethod( + SearchTuningServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SearchTuningServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SearchTuningServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SearchTuningServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SearchTuningServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SearchTuningServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SearchTuningServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SearchTuningServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SearchTuningServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SearchTuningServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. + """ + return SearchTuningServiceClient.from_service_account_info.__func__(SearchTuningServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. + """ + return SearchTuningServiceClient.from_service_account_file.__func__(SearchTuningServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SearchTuningServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SearchTuningServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SearchTuningServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
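The classmethods above delegate to the synchronous client, so either flavor can be built straight from a service account key file. A sketch with a placeholder path:

.. code-block:: python

    # Sketch only: the key file path is a placeholder.
    from google.cloud import discoveryengine_v1

    client = discoveryengine_v1.SearchTuningServiceAsyncClient.from_service_account_file(
        "service-account.json"  # placeholder
    )
    print(client.api_endpoint)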
+ """ + return self._client._universe_domain + + get_transport_class = SearchTuningServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + SearchTuningServiceTransport, + Callable[..., SearchTuningServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search tuning service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,SearchTuningServiceTransport,Callable[..., SearchTuningServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SearchTuningServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SearchTuningServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def train_custom_model( + self, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Trains a custom model. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.TrainCustomModelRequest, dict]]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.TrainCustomModelRequest): + request = search_tuning_service.TrainCustomModelRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.train_custom_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + + # Done; return the response. + return response + + async def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListCustomModelsRequest, dict]]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_custom_models + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
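Taken together, the two methods above support a train-then-inspect flow: ``train_custom_model`` returns a long-running operation whose result can be awaited before listing the models on the same data store. A sketch with a placeholder data store name (note that ``AsyncOperation.result()`` is itself awaitable):

.. code-block:: python

    # Sketch only: the data store resource name is a placeholder.
    import asyncio

    from google.cloud import discoveryengine_v1


    async def tune_and_list():
        client = discoveryengine_v1.SearchTuningServiceAsyncClient()
        data_store = (
            "projects/my-project/locations/global/collections/"
            "default_collection/dataStores/my-data-store"  # placeholder
        )

        operation = await client.train_custom_model(
            request=discoveryengine_v1.TrainCustomModelRequest(data_store=data_store)
        )
        response = await operation.result()  # AsyncOperation.result() is a coroutine
        print(response)

        models = await client.list_custom_models(
            request=discoveryengine_v1.ListCustomModelsRequest(data_store=data_store)
        )
        print(models)


    asyncio.run(tune_and_list())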
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "SearchTuningServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchTuningServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py new file mode 100644 index 000000000000..97d62eb19d51 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py @@ -0,0 +1,1097 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
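The operations mixins defined above accept either ``operations_pb2`` requests or plain dicts, and the routing added throughout this change also resolves operation names under ``identity_mapping_stores``. A sketch with placeholder names:

.. code-block:: python

    # Sketch only: both resource/operation names are placeholders; the second
    # call exercises the newly added identity_mapping_stores routing.
    import asyncio

    from google.cloud import discoveryengine_v1


    async def check_operations():
        client = discoveryengine_v1.SearchTuningServiceAsyncClient()

        op = await client.get_operation(
            request={
                "name": (
                    "projects/my-project/locations/global/dataStores/"
                    "my-data-store/operations/my-operation"  # placeholder
                )
            }
        )
        print(op.done)

        ops = await client.list_operations(
            request={
                "name": (
                    "projects/my-project/locations/global/"
                    "identity_mapping_stores/my-store"  # placeholder
                )
            }
        )
        for operation in ops.operations:
            print(operation.name)


    asyncio.run(check_operations())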
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Callable,
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.discoveryengine_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+from google.cloud.discoveryengine_v1.types import (
+    custom_tuning_model,
+    search_tuning_service,
+)
+
+from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport
+from .transports.grpc import SearchTuningServiceGrpcTransport
+from .transports.grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport
+from .transports.rest import SearchTuningServiceRestTransport
+
+
+class SearchTuningServiceClientMeta(type):
+    """Metaclass for the SearchTuningService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[SearchTuningServiceTransport]]
+    _transport_registry["grpc"] = SearchTuningServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = SearchTuningServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[SearchTuningServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class SearchTuningServiceClient(metaclass=SearchTuningServiceClientMeta):
+    """Service for search tuning."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SearchTuningServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SearchTuningServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def custom_tuning_model_path(
+        project: str,
+        location: str,
+        data_store: str,
+        custom_tuning_model: str,
+    ) -> str:
+        """Returns a fully-qualified custom_tuning_model string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            custom_tuning_model=custom_tuning_model,
+        )
+
+    @staticmethod
+    def parse_custom_tuning_model_path(path: str) -> Dict[str, str]:
+        """Parses a custom_tuning_model path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/customTuningModels/(?P<custom_tuning_model>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def data_store_path(
+        project: str,
+        location: str,
+        data_store: str,
+    ) -> str:
+        """Returns a fully-qualified data_store string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+        )
+
+    @staticmethod
+    def parse_data_store_path(path: str) -> Dict[str, str]:
+        """Parses a data_store path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m
else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = SearchTuningServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or SearchTuningServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + SearchTuningServiceTransport, + Callable[..., SearchTuningServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search tuning service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,SearchTuningServiceTransport,Callable[..., SearchTuningServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SearchTuningServiceTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SearchTuningServiceClient._read_environment_variables() + self._client_cert_source = SearchTuningServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = SearchTuningServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SearchTuningServiceTransport) + if transport_provided: + # transport is a SearchTuningServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(SearchTuningServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SearchTuningServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[SearchTuningServiceTransport], + Callable[..., SearchTuningServiceTransport], + ] = ( + SearchTuningServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SearchTuningServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def train_custom_model( + self, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Trains a custom model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.TrainCustomModelRequest, dict]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.TrainCustomModelRequest): + request = search_tuning_service.TrainCustomModelRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.train_custom_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + + # Done; return the response. + return response + + def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListCustomModelsRequest, dict]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. 
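+        # (Illustrative note: because there are no flattened fields, a plain
+        # dict such as {"data_store": "<data store resource name>"} may also be
+        # passed as ``request``; it is coerced into a ListCustomModelsRequest
+        # below.)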
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_custom_models] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SearchTuningServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. 
Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchTuningServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py new file mode 100644 index 000000000000..8c56b6549cef --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SearchTuningServiceTransport +from .grpc import SearchTuningServiceGrpcTransport +from .grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport +from .rest import SearchTuningServiceRestInterceptor, SearchTuningServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SearchTuningServiceTransport]] +_transport_registry["grpc"] = SearchTuningServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SearchTuningServiceRestTransport + +__all__ = ( + "SearchTuningServiceTransport", + "SearchTuningServiceGrpcTransport", + "SearchTuningServiceGrpcAsyncIOTransport", + "SearchTuningServiceRestTransport", + "SearchTuningServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py new file mode 100644 index 000000000000..cd575fdaf62d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py @@ -0,0 +1,212 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
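The transports package added above registers the "grpc", "grpc_asyncio", and "rest" implementations behind a single interface; a brief sketch of selecting one explicitly when constructing the client (illustrative only; credentials are assumed to come from the environment):

```python
# Illustrative only: pick a transport by its registered label.
# Omitting the argument keeps the default, which is the gRPC transport.
from google.cloud import discoveryengine_v1

grpc_client = discoveryengine_v1.SearchTuningServiceClient()  # default: "grpc"
rest_client = discoveryengine_v1.SearchTuningServiceClient(transport="rest")
```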
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import search_tuning_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SearchTuningServiceTransport(abc.ABC): + """Abstract transport class for SearchTuningService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.train_custom_model: gapic_v1.method.wrap_method( + self.train_custom_model, + default_timeout=None, + client_info=client_info, + ), + self.list_custom_models: gapic_v1.method.wrap_method( + self.list_custom_models, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + Union[ + search_tuning_service.ListCustomModelsResponse, + Awaitable[search_tuning_service.ListCustomModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SearchTuningServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py new file mode 100644 index 000000000000..614fb0942cbe --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py @@ -0,0 +1,372 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport + + +class SearchTuningServiceGrpcTransport(SearchTuningServiceTransport): + """gRPC backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the train custom model method over gRPC. + + Trains a custom model. + + Returns: + Callable[[~.TrainCustomModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "train_custom_model" not in self._stubs: + self._stubs["train_custom_model"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/TrainCustomModel", + request_serializer=search_tuning_service.TrainCustomModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["train_custom_model"] + + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + r"""Return a callable for the list custom models method over gRPC. + + Gets a list of all the custom models. + + Returns: + Callable[[~.ListCustomModelsRequest], + ~.ListCustomModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, + ) + return self._stubs["list_custom_models"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SearchTuningServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3b4639ec7de9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .grpc import SearchTuningServiceGrpcTransport + + +class SearchTuningServiceGrpcAsyncIOTransport(SearchTuningServiceTransport): + """gRPC AsyncIO backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the train custom model method over gRPC. + + Trains a custom model. + + Returns: + Callable[[~.TrainCustomModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "train_custom_model" not in self._stubs: + self._stubs["train_custom_model"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/TrainCustomModel", + request_serializer=search_tuning_service.TrainCustomModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["train_custom_model"] + + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + Awaitable[search_tuning_service.ListCustomModelsResponse], + ]: + r"""Return a callable for the list custom models method over gRPC. 
+ + Gets a list of all the custom models. + + Returns: + Callable[[~.ListCustomModelsRequest], + Awaitable[~.ListCustomModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, + ) + return self._stubs["list_custom_models"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.train_custom_model: gapic_v1.method_async.wrap_method( + self.train_custom_model, + default_timeout=None, + client_info=client_info, + ), + self.list_custom_models: gapic_v1.method_async.wrap_method( + self.list_custom_models, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SearchTuningServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py new file mode 100644 index 000000000000..7418b189ce0d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py @@ -0,0 +1,1034 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SearchTuningServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SearchTuningServiceRestInterceptor: + """Interceptor for SearchTuningService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SearchTuningServiceRestTransport. + + .. 
code-block:: python + class MyCustomSearchTuningServiceInterceptor(SearchTuningServiceRestInterceptor): + def pre_list_custom_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_custom_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_train_custom_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_train_custom_model(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SearchTuningServiceRestTransport(interceptor=MyCustomSearchTuningServiceInterceptor()) + client = SearchTuningServiceClient(transport=transport) + + + """ + + def pre_list_custom_models( + self, + request: search_tuning_service.ListCustomModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_list_custom_models( + self, response: search_tuning_service.ListCustomModelsResponse + ) -> search_tuning_service.ListCustomModelsResponse: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_train_custom_model( + self, + request: search_tuning_service.TrainCustomModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.TrainCustomModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_train_custom_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SearchTuningServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SearchTuningServiceRestInterceptor + + +class SearchTuningServiceRestTransport(SearchTuningServiceTransport): + """REST backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SearchTuningServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or SearchTuningServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def operations_client(self) -> operations_v1.AbstractOperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Only create a new client if we do not already have one.
+ if self._operations_client is None:
+ http_options: Dict[str, List[Dict[str, str]]] = {
+ "google.longrunning.Operations.CancelOperation": [
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/operations/*}:cancel",
+ "body": "*",
+ },
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel",
+ "body": "*",
+ },
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel",
+ "body": "*",
+ },
+ ],
+ "google.longrunning.Operations.GetOperation": [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ListCustomModels(SearchTuningServiceRestStub): + def __hash__(self): + return hash("ListCustomModels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.ListCustomModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Call the list custom models method over HTTP. + + Args: + request (~.search_tuning_service.ListCustomModelsRequest): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.search_tuning_service.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{data_store=projects/*/locations/*/collections/*/dataStores/*}/customModels", + }, + ] + request, metadata = self._interceptor.pre_list_custom_models( + request, metadata + ) + pb_request = search_tuning_service.ListCustomModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = search_tuning_service.ListCustomModelsResponse() + pb_resp = search_tuning_service.ListCustomModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_custom_models(resp) + return resp + + class _TrainCustomModel(SearchTuningServiceRestStub): + def __hash__(self): + return hash("TrainCustomModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.TrainCustomModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the train custom model method over HTTP. + + Args: + request (~.search_tuning_service.TrainCustomModelRequest): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{data_store=projects/*/locations/*/collections/*/dataStores/*}:trainCustomModel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_train_custom_model( + request, metadata + ) + pb_request = search_tuning_service.TrainCustomModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_train_custom_model(resp) + return resp + + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCustomModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TrainCustomModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SearchTuningServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py index fc45da7663ef..319dd0689231 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py @@ -727,6 +727,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -789,6 +793,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -2299,6 +2307,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -2421,6 +2433,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py index 20dbd3c1b872..320432527156 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py @@ -53,6 +53,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -497,6 +498,110 @@ async def sample_collect_user_event(): # Done; return the response. return response + async def purge_user_events( + self, + request: Optional[Union[purge_config.PurgeUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes permanently all user events specified by the + filter provided. 
Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest, dict]]): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeUserEventsRequest): + request = purge_config.PurgeUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.purge_user_events + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeUserEventsResponse, + metadata_type=purge_config.PurgeUserEventsMetadata, + ) + + # Done; return the response. 
+ return response + async def import_user_events( self, request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py index e2936a08eb22..c881e7747a63 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py @@ -59,6 +59,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -963,6 +964,108 @@ def sample_collect_user_event(): # Done; return the response. return response + def purge_user_events( + self, + request: Optional[Union[purge_config.PurgeUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest, dict]): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, purge_config.PurgeUserEventsRequest): + request = purge_config.PurgeUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_user_events] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeUserEventsResponse, + metadata_type=purge_config.PurgeUserEventsMetadata, + ) + + # Done; return the response. + return response + def import_user_events( self, request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py index db533bd228dd..bb46c14805c3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py @@ -30,6 +30,7 @@ from google.cloud.discoveryengine_v1 import gapic_version as package_version from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -145,6 +146,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_user_events: gapic_v1.method.wrap_method( + self.purge_user_events, + default_timeout=None, + client_info=client_info, + ), self.import_user_events: gapic_v1.method.wrap_method( self.import_user_events, default_retry=retries.Retry( @@ -193,6 +199,15 @@ def collect_user_event( ]: raise NotImplementedError() + @property + def purge_user_events( + self, + ) -> Callable[ + [purge_config.PurgeUserEventsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def import_user_events( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py index 29ea6149acb1..21e5a071cbb7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py @@ -27,6 +27,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -315,6 +316,36 @@ def collect_user_event( ) return self._stubs["collect_user_event"] + @property + def purge_user_events( + self, + ) -> Callable[[purge_config.PurgeUserEventsRequest], operations_pb2.Operation]: + r"""Return a callable for the purge user events method over gRPC. + + Deletes permanently all user events specified by the + filter provided. 
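Taken together, the async and sync client methods above surface ``PurgeUserEvents`` as a long-running operation whose result type is ``PurgeUserEventsResponse`` and whose metadata type is ``PurgeUserEventsMetadata``. A minimal usage sketch against the synchronous client, using a placeholder data store path and the visitor filter documented on ``PurgeUserEventsRequest``:

.. code-block:: python

    # Sketch only: the parent path is a placeholder resource name.
    from google.cloud import discoveryengine_v1


    def purge_visitor_events() -> None:
        client = discoveryengine_v1.UserEventServiceClient()

        request = discoveryengine_v1.PurgeUserEventsRequest(
            parent=(
                "projects/my-project/locations/global/collections"
                "/default_collection/dataStores/my-data-store"
            ),
            # Deletes every event recorded for a single visitor.
            filter='userPseudoId = "visitor1024"',
        )

        operation = client.purge_user_events(request=request)
        print("Waiting for operation to complete...")

        # The future resolves to a PurgeUserEventsResponse.
        response = operation.result()
        print(f"Purged {response.purge_count} events")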
Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + Returns: + Callable[[~.PurgeUserEventsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_user_events" not in self._stubs: + self._stubs["purge_user_events"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.UserEventService/PurgeUserEvents", + request_serializer=purge_config.PurgeUserEventsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_user_events"] + @property def import_user_events( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py index 030bc370d024..06c828c190ac 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py @@ -29,6 +29,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -325,6 +326,38 @@ def collect_user_event( ) return self._stubs["collect_user_event"] + @property + def purge_user_events( + self, + ) -> Callable[ + [purge_config.PurgeUserEventsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the purge user events method over gRPC. + + Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + Returns: + Callable[[~.PurgeUserEventsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_user_events" not in self._stubs: + self._stubs["purge_user_events"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.UserEventService/PurgeUserEvents", + request_serializer=purge_config.PurgeUserEventsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_user_events"] + @property def import_user_events( self, @@ -373,6 +406,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_user_events: gapic_v1.method_async.wrap_method( + self.purge_user_events, + default_timeout=None, + client_info=client_info, + ), self.import_user_events: gapic_v1.method_async.wrap_method( self.import_user_events, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py index 2f87a0777f58..95798800bf12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py @@ -48,6 +48,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -93,6 +94,14 @@ def post_import_user_events(self, response): logging.log(f"Received response: {response}") return response + def pre_purge_user_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_user_events(self, response): + logging.log(f"Received response: {response}") + return response + def pre_write_user_event(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -153,6 +162,29 @@ def post_import_user_events( """ return response + def pre_purge_user_events( + self, + request: purge_config.PurgeUserEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[purge_config.PurgeUserEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_user_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_purge_user_events( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_user_events + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. 
+ """ + return response + def pre_write_user_event( self, request: user_event_service.WriteUserEventRequest, @@ -422,6 +454,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -484,6 +520,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -759,6 +799,107 @@ def __call__( resp = self._interceptor.post_import_user_events(resp) return resp + class _PurgeUserEvents(UserEventServiceRestStub): + def __hash__(self): + return hash("PurgeUserEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: purge_config.PurgeUserEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge user events method over HTTP. + + Args: + request (~.purge_config.PurgeUserEventsRequest): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/userEvents:purge", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_user_events( + request, metadata + ) + pb_request = purge_config.PurgeUserEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_user_events(resp) + return resp + class _WriteUserEvent(UserEventServiceRestStub): def __hash__(self): return hash("WriteUserEvent") @@ -884,6 +1025,14 @@ def import_user_events( # In C++ this would require a dynamic_cast return self._ImportUserEvents(self._session, self._host, self._interceptor) # type: ignore + @property + def purge_user_events( + self, + ) -> Callable[[purge_config.PurgeUserEventsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeUserEvents(self._session, self._host, self._interceptor) # type: ignore + @property def write_user_event( self, @@ -1048,6 +1197,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1170,6 +1323,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py index 79d6e4f7f3e3..8804192f3d63 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py @@ -62,6 +62,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .custom_tuning_model import CustomTuningModel from .data_store import DataStore from .data_store_service import ( CreateDataStoreMetadata, @@ -76,6 +77,8 @@ from .document import Document from .document_processing_config import DocumentProcessingConfig from .document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -133,9 +136,13 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, + PurgeUserEventsMetadata, + PurgeUserEventsRequest, + PurgeUserEventsResponse, ) from .rank_service import RankingRecord, RankRequest, RankResponse from .recommendation_service import RecommendRequest, RecommendResponse @@ -152,6 +159,13 @@ UpdateSchemaRequest, ) from .search_service import SearchRequest, SearchResponse +from .search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) from .session import Query, Session from .site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .site_search_engine_service import ( @@ -240,6 +254,7 @@ "ListSessionsResponse", "UpdateConversationRequest", "UpdateSessionRequest", + "CustomTuningModel", "DataStore", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -251,6 +266,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -302,9 +319,13 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "PurgeUserEventsMetadata", + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", "RankingRecord", "RankRequest", "RankResponse", @@ -322,6 +343,11 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", + 
"TrainCustomModelMetadata", + "TrainCustomModelRequest", + "TrainCustomModelResponse", "Query", "Session", "SiteSearchEngine", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py index 8615cafa87d4..290bf922e3c3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py @@ -105,6 +105,18 @@ class AnswerSkippedReason(proto.Enum): Google skips the answer if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (6): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing + company's CEO". Google skips the answer if the + query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -112,6 +124,8 @@ class AnswerSkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 + JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. @@ -171,6 +185,10 @@ class Reference(proto.Message): chunk_info (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo): Chunk information. + This field is a member of `oneof`_ ``content``. + structured_document_info (google.cloud.discoveryengine_v1.types.Answer.Reference.StructuredDocumentInfo): + Structured document information. + This field is a member of `oneof`_ ``content``. """ @@ -196,11 +214,22 @@ class UnstructuredDocumentInfo(proto.Message): class ChunkContent(proto.Message): r"""Chunk content. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: content (str): Chunk textual content. page_identifier (str): Page identifier. + relevance_score (float): + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. + + This field is a member of `oneof`_ ``_relevance_score``. """ content: str = proto.Field( @@ -211,6 +240,11 @@ class ChunkContent(proto.Message): proto.STRING, number=2, ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) document: str = proto.Field( proto.STRING, @@ -248,7 +282,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. document_metadata (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo.DocumentMetadata): @@ -316,6 +355,26 @@ class DocumentMetadata(proto.Message): ) ) + class StructuredDocumentInfo(proto.Message): + r"""Structured search information. 
+ + Attributes: + document (str): + Document resource name. + struct_data (google.protobuf.struct_pb2.Struct): + Structured search data. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + unstructured_document_info: "Answer.Reference.UnstructuredDocumentInfo" = ( proto.Field( proto.MESSAGE, @@ -330,6 +389,14 @@ class DocumentMetadata(proto.Message): oneof="content", message="Answer.Reference.ChunkInfo", ) + structured_document_info: "Answer.Reference.StructuredDocumentInfo" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="content", + message="Answer.Reference.StructuredDocumentInfo", + ) + ) class Step(proto.Message): r"""Step information. @@ -417,11 +484,10 @@ class SearchResult(proto.Message): If citation_type is CHUNK_LEVEL_CITATION and chunk mode is on, populate chunk info. struct_data (google.protobuf.struct_pb2.Struct): - Data representation. The structured JSON data for the - document. It's populated from the struct data from the - Document (code pointer: http://shortn/_objzAfIiHq), or the - Chunk in search result (code pointer: - http://shortn/_Ipo6KFFGBL). + Data representation. + The structured JSON data for the document. + It's populated from the struct data from the + Document, or the Chunk in search result. """ class SnippetInfo(proto.Message): @@ -455,7 +521,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. """ @@ -573,10 +644,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. """ TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 type_: "Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type" = ( proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py index db871cdc66b1..2e7748c91522 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py @@ -111,8 +111,10 @@ class TimeRange(proto.Message): class Control(proto.Message): r"""Defines a conditioned behavior to employ during serving. Must be - attached to a [ServingConfig][] to be considered at serving time. - Permitted actions dependent on ``SolutionType``. + attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to be + considered at serving time. Permitted actions dependent on + ``SolutionType``. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -150,9 +152,10 @@ class Control(proto.Message): 128 characters. Otherwise an INVALID ARGUMENT error is thrown. associated_serving_config_ids (MutableSequence[str]): - Output only. 
List of all [ServingConfig][] ids this control - is attached to. May take up to 10 minutes to update after - changes. + Output only. List of all + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + IDs this control is attached to. May take up to 10 minutes + to update after changes. solution_type (google.cloud.discoveryengine_v1.types.SolutionType): Required. Immutable. What solution the control belongs to. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py index 61b087c0b75e..b979c3de69bf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py @@ -654,14 +654,8 @@ class SearchParams(proto.Message): returned. search_result_mode (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SearchResultMode): Specifies the search result mode. If unspecified, the search - result mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] - is specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. See `parse and - chunk - documents `__ + result mode defaults to ``DOCUMENTS``. See `parse and chunk + documents `__ data_store_specs (MutableSequence[google.cloud.discoveryengine_v1.types.SearchRequest.DataStoreSpec]): Specs defining dataStores to filter on in a search call and configurations for those @@ -924,10 +918,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. """ TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 types: MutableSequence[ "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type" @@ -1031,8 +1028,8 @@ class AnswerQueryResponse(proto.Message): Attributes: answer (google.cloud.discoveryengine_v1.types.Answer): Answer resource object. If - [AnswerQueryRequest.StepSpec.max_step_count][] is greater - than 1, use + [AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec.max_rephrase_steps][google.cloud.discoveryengine.v1.AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec.max_rephrase_steps] + is greater than 1, use [Answer.name][google.cloud.discoveryengine.v1.Answer.name] to fetch answer information using [ConversationalSearchService.GetAnswer][google.cloud.discoveryengine.v1.ConversationalSearchService.GetAnswer] diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py new file mode 100644 index 000000000000..6b1a58131a45 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CustomTuningModel", + }, +) + + +class CustomTuningModel(proto.Message): + r"""Metadata that describes a custom tuned model. + + Attributes: + name (str): + Required. The fully qualified resource name of the model. + + Format: + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}`` + model must be an alpha-numerical string with limit of 40 + characters. + display_name (str): + The display name of the model. + model_version (int): + The version of the model. + model_state (google.cloud.discoveryengine_v1.types.CustomTuningModel.ModelState): + The state that the model is in (e.g.``TRAINING`` or + ``TRAINING_FAILED``). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Deprecated: timestamp the Model was created + at. + training_start_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp the model training was initiated. + metrics (MutableMapping[str, float]): + The metrics of the trained model. + """ + + class ModelState(proto.Enum): + r"""The state of the model. + + Values: + MODEL_STATE_UNSPECIFIED (0): + Default value. + TRAINING_PAUSED (1): + The model is in a paused training state. + TRAINING (2): + The model is currently training. + TRAINING_COMPLETE (3): + The model has successfully completed + training. + READY_FOR_SERVING (4): + The model is ready for serving. + TRAINING_FAILED (5): + The model training failed. + NO_IMPROVEMENT (6): + The model training finished successfully but + metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. 
+ """ + MODEL_STATE_UNSPECIFIED = 0 + TRAINING_PAUSED = 1 + TRAINING = 2 + TRAINING_COMPLETE = 3 + READY_FOR_SERVING = 4 + TRAINING_FAILED = 5 + NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + model_version: int = proto.Field( + proto.INT64, + number=3, + ) + model_state: ModelState = proto.Field( + proto.ENUM, + number=4, + enum=ModelState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + training_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + metrics: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py index 89f4d5d68ccc..65647f1a300a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py @@ -67,6 +67,17 @@ class CreateDataStoreRequest(proto.Message): create an advanced data store for site search. If the data store is not configured as site search (GENERIC vertical and PUBLIC_WEBSITE content_config), this flag will be ignored. + skip_default_schema_creation (bool): + A boolean flag indicating whether to skip the default schema + creation for the data store. Only enable this flag if you + are certain that the default schema is incompatible with + your use case. + + If set to true, you must manually create a schema for the + data store before any documents can be ingested. + + This flag cannot be specified if + ``data_store.starting_schema`` is specified. """ parent: str = proto.Field( @@ -86,6 +97,10 @@ class CreateDataStoreRequest(proto.Message): proto.BOOL, number=4, ) + skip_default_schema_creation: bool = proto.Field( + proto.BOOL, + number=7, + ) class GetDataStoreRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py index 0d4d2ad7fa5b..b613e83e20a2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -91,6 +92,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. + index_status (google.cloud.discoveryengine_v1.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. + - Otherwise, index_status is unset. 
""" class Content(proto.Message): @@ -154,6 +163,31 @@ class Content(proto.Message): number=1, ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -196,6 +230,11 @@ class Content(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py index 99d5a7597569..7f19cb717e94 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py @@ -61,6 +61,8 @@ class DocumentProcessingConfig(proto.Message): digital parsing and layout parsing are supported. - ``pptx``: Override parsing config for PPTX files, only digital parsing and layout parsing are supported. + - ``xlsx``: Override parsing config for XLSX files, only + digital parsing and layout parsing are supported. """ class ChunkingConfig(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py index 7e8d2b5f8263..39388da42c72 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1.types import document as gcd_document @@ -31,6 +32,8 @@ "CreateDocumentRequest", "UpdateDocumentRequest", "DeleteDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -268,4 +271,173 @@ class DeleteDocumentRequest(proto.Message): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. 
Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s. + """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s by exact uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s. Currently + supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. + """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The [Document][google.cloud.discoveryengine.v1.Document] is + indexed. + NOT_IN_TARGET_SITE (2): + The [Document][google.cloud.discoveryengine.v1.Document] is + not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + NOT_IN_INDEX (3): + The [Document][google.cloud.discoveryengine.v1.Document] is + not indexed. + """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1.Document]. + state (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. + last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1.Document] was + last indexed. + data_ingestion_source (str): + The data ingestion source of the + [Document][google.cloud.discoveryengine.v1.Document]. + + Allowed values are: + + - ``batch``: Data ingested via Batch API, e.g., + ImportDocuments. + - ``streaming`` Data ingested via Streaming API, e.g., FHIR + streaming. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1.Document]. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1.Document]. + + This field is a member of `oneof`_ ``matcher_value``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data_ingestion_source: str = proto.Field( + proto.STRING, + number=5, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py index 36de20d74df6..009d4e1badfd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py index bade57407d0a..7c238cb1b194 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py @@ -121,10 +121,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -197,9 +197,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. 
If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -420,9 +420,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the Cloud @@ -470,6 +470,11 @@ class FhirStoreSource(proto.Message): characters. Can be specified if one wants to have the FhirStore export to a specific Cloud Storage directory. + resource_types (MutableSequence[str]): + The FHIR resource types to import. The resource types should + be a subset of all `supported FHIR resource + types `__. + Default to all supported FHIR resource types if empty. """ fhir_store: str = proto.Field( @@ -480,6 +485,10 @@ class FhirStoreSource(proto.Message): proto.STRING, number=2, ) + resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CloudSqlSource(proto.Message): @@ -487,9 +496,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -547,9 +556,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py index 6a4b091e5ba5..6633bc93f7a8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py @@ -21,9 +21,15 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore +from google.cloud.discoveryengine_v1.types import import_config + __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1", manifest={ + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", + "PurgeUserEventsMetadata", + "PurgeErrorConfig", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", @@ -37,24 +43,211 @@ ) +class PurgeUserEventsRequest(proto.Message): + r"""Request message for PurgeUserEvents method. + + Attributes: + parent (str): + Required. The resource name of the catalog under which the + events are created. The format is + ``projects/{project}/locations/global/collections/{collection}/dataStores/{dataStore}`` + filter (str): + Required. 
The filter string to specify the events to be + deleted with a length limit of 5,000 characters. The + eligible fields for filtering are: + + - ``eventType``: Double quoted + [UserEvent.event_type][google.cloud.discoveryengine.v1.UserEvent.event_type] + string. + - ``eventTime``: in ISO 8601 "zulu" format. + - ``userPseudoId``: Double quoted string. Specifying this + will delete all events associated with a visitor. + - ``userId``: Double quoted string. Specifying this will + delete all events associated with a user. + + Examples: + + - Deleting all events in a time range: + ``eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"`` + - Deleting specific eventType: ``eventType = "search"`` + - Deleting all events for a specific visitor: + ``userPseudoId = "visitor1024"`` + - Deleting all events inside a DataStore: ``*`` + + The filtering fields are assumed to have an implicit AND. + force (bool): + The ``force`` field is currently not supported. Purge user + event requests will permanently delete all purgeable events. + Once the development is complete: If ``force`` is set to + false, the method will return the expected purge count + without deleting any user events. This field will default to + false if not included in the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class PurgeUserEventsResponse(proto.Message): + r"""Response of the PurgeUserEventsRequest. If the long running + operation is successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + Attributes: + purge_count (int): + The total count of events purged as a result + of the operation. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + + +class PurgeUserEventsMetadata(proto.Message): + r"""Metadata related to the progress of the PurgeUserEvents + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were deleted + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class PurgeErrorConfig(proto.Message): + r"""Configuration of destination for Purge related errors. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_prefix (str): + Cloud Storage prefix for purge errors. This must be an + empty, existing Cloud Storage directory. Purge errors are + written to sharded files in this directory, one per line, as + a JSON-encoded ``google.rpc.Status`` message. + + This field is a member of `oneof`_ ``destination``. 
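The filter grammar above combines clauses with an implicit AND, so a time-bounded purge of a single event type is simply the clauses written side by side. A request-construction sketch using placeholder values:

.. code-block:: python

    # Sketch only: parent is a placeholder; filter clauses are ANDed implicitly.
    from google.cloud import discoveryengine_v1

    request = discoveryengine_v1.PurgeUserEventsRequest(
        parent=(
            "projects/my-project/locations/global/collections"
            "/default_collection/dataStores/my-data-store"
        ),
        filter=(
            'eventType = "search" '
            'eventTime > "2012-04-23T18:25:43.511Z" '
            'eventTime < "2012-04-23T18:30:43.511Z"'
        ),
        # Per the field documentation, ``force`` is not honored yet; purge
        # requests currently delete all matching purgeable events.
        force=False,
    )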
+ """ + + gcs_prefix: str = proto.Field( + proto.STRING, + number=1, + oneof="destination", + ) + + class PurgeDocumentsRequest(proto.Message): r"""Request message for [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] method. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + gcs_source (google.cloud.discoveryengine_v1.types.GcsSource): + Cloud Storage location for the input content. Supported + ``data_schema``: + + - ``document_id``: One valid + [Document.id][google.cloud.discoveryengine.v1.Document.id] + per line. + + This field is a member of `oneof`_ ``source``. + inline_source (google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest.InlineSource): + Inline source for the input content for + purge. + + This field is a member of `oneof`_ ``source``. parent (str): Required. The parent resource name, such as ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. filter (str): Required. Filter matching documents to purge. Only currently supported value is ``*`` (all items). + error_config (google.cloud.discoveryengine_v1.types.PurgeErrorConfig): + The desired location of errors incurred + during the purge. force (bool): Actually performs the purge. If ``force`` is set to false, return the expected purge count without deleting any documents. """ + class InlineSource(proto.Message): + r"""The inline source for the input config for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. + + Attributes: + documents (MutableSequence[str]): + Required. A list of full resource name of documents to + purge. In the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + Recommended max of 100 items. + """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + gcs_source: import_config.GcsSource = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=import_config.GcsSource, + ) + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=InlineSource, + ) parent: str = proto.Field( proto.STRING, number=1, @@ -63,6 +256,11 @@ class PurgeDocumentsRequest(proto.Message): proto.STRING, number=2, ) + error_config: "PurgeErrorConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PurgeErrorConfig", + ) force: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py index f225dff737b9..ae0cbbd72035 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py @@ -37,7 +37,8 @@ class RecommendRequest(proto.Message): Attributes: serving_config (str): - Required. Full resource name of a [ServingConfig][]: + Required. 
Full resource name of a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig]: ``projects/*/locations/global/collections/*/engines/*/servingConfigs/*``, or ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py index 082241aaa2dd..4c31c629c97b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py @@ -139,9 +139,12 @@ class SearchRequest(proto.Message): Leave it unset if ordered by relevance. ``order_by`` expression is case-sensitive. - For more information on ordering for retail search, see - `Ordering `__ - + For more information on ordering the website search results, + see `Order web search + results `__. + For more information on ordering the healthcare search + results, see `Order healthcare search + results `__. If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. user_info (google.cloud.discoveryengine_v1.types.UserInfo): @@ -682,12 +685,7 @@ class ContentSearchSpec(proto.Message): be no extractive answer in the search response. search_result_mode (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SearchResultMode): Specifies the search result mode. If unspecified, the search - result mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] - is specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + result mode defaults to ``DOCUMENTS``. chunk_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.ChunkSpec): Specifies the chunk spec to be returned from the search response. Only available if the @@ -698,12 +696,7 @@ class ContentSearchSpec(proto.Message): class SearchResultMode(proto.Enum): r"""Specifies the search result mode. If unspecified, the search result - mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] is - specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + mode defaults to ``DOCUMENTS``. Values: SEARCH_RESULT_MODE_UNSPECIFIED (0): @@ -815,6 +808,14 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. The default value is ``false``. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true``, only queries with high relevance search results + will generate answers. model_prompt_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): If specified, the spec will be used to modify the prompt provided to the LLM. 
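Like the existing ``ignore_adversarial_query`` and ``ignore_non_summary_seeking_query`` knobs, the new ``ignore_low_relevant_content`` flag is set on the summary spec of a search request. A brief sketch with a placeholder serving config:

.. code-block:: python

    # Sketch only: the serving config path is a placeholder.
    from google.cloud import discoveryengine_v1

    summary_spec = discoveryengine_v1.SearchRequest.ContentSearchSpec.SummarySpec(
        ignore_adversarial_query=True,
        # Skip answer generation when the search results have low relevance.
        ignore_low_relevant_content=True,
    )

    request = discoveryengine_v1.SearchRequest(
        serving_config=(
            "projects/my-project/locations/global/collections/default_collection"
            "/engines/my-engine/servingConfigs/default_serving_config"
        ),
        query="example query",
        content_search_spec=discoveryengine_v1.SearchRequest.ContentSearchSpec(
            summary_spec=summary_spec,
        ),
    )

    client = discoveryengine_v1.SearchServiceClient()
    pager = client.search(request=request)
    # The generated summary (when one is produced) is returned on the response
    # alongside the individual results.
    for result in pager:
        print(result.document.id)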
@@ -892,6 +893,10 @@ class ModelSpec(proto.Message): proto.BOOL, number=4, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=9, + ) model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( proto.MESSAGE, number=5, @@ -1275,7 +1280,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. If redirect action is triggered, no search is performed, and only @@ -1442,13 +1448,13 @@ class SummarySkippedReason(proto.Enum): ADVERSARIAL_QUERY_IGNORED (1): The adversarial query ignored case. - Only populated when + Only used when [SummarySpec.ignore_adversarial_query][google.cloud.discoveryengine.v1.SearchRequest.ContentSearchSpec.SummarySpec.ignore_adversarial_query] is set to ``true``. NON_SUMMARY_SEEKING_QUERY_IGNORED (2): The non-summary seeking query ignored case. - Only populated when + Only used when [SummarySpec.ignore_non_summary_seeking_query][google.cloud.discoveryengine.v1.SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_summary_seeking_query] is set to ``true``. OUT_OF_DOMAIN_QUERY_IGNORED (3): @@ -1470,6 +1476,24 @@ class SummarySkippedReason(proto.Enum): Google skips the summary if the LLM addon is not enabled. + NO_RELEVANT_CONTENT (6): + The no relevant content case. + + Google skips the summary if there is no relevant + content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (7): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing company's + CEO". Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] + is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1477,6 +1501,9 @@ class SummarySkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 LLM_ADDON_NOT_ENABLED = 5 + NO_RELEVANT_CONTENT = 6 + JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py new file mode 100644 index 000000000000..d32623e52681 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import custom_tuning_model, import_config + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "ListCustomModelsRequest", + "ListCustomModelsResponse", + "TrainCustomModelRequest", + "TrainCustomModelResponse", + "TrainCustomModelMetadata", + }, +) + + +class ListCustomModelsRequest(proto.Message): + r"""Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + Attributes: + data_store (str): + Required. The resource name of the parent Data Store, such + as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to fetch + the models from. + """ + + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListCustomModelsResponse(proto.Message): + r"""Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + Attributes: + models (MutableSequence[google.cloud.discoveryengine_v1.types.CustomTuningModel]): + List of custom tuning models. + """ + + models: MutableSequence[ + custom_tuning_model.CustomTuningModel + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=custom_tuning_model.CustomTuningModel, + ) + + +class TrainCustomModelRequest(proto.Message): + r"""Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_training_input (google.cloud.discoveryengine_v1.types.TrainCustomModelRequest.GcsTrainingInput): + Cloud Storage training input. + + This field is a member of `oneof`_ ``training_input``. + data_store (str): + Required. The resource name of the Data Store, such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to train + the models. + model_type (str): + Model to be trained. Supported values are: + + - **search-tuning**: Fine tuning the search system based on + data provided. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + The desired location of errors incurred + during the data ingestion and training. + model_id (str): + If not provided, a UUID will be generated. + """ + + class GcsTrainingInput(proto.Message): + r"""Cloud Storage training data input. + + Attributes: + corpus_data_path (str): + The Cloud Storage corpus data which could be associated in + train data. The data path format is + ``gs:///``. A newline + delimited jsonl/ndjson file. + + For search-tuning model, each line should have the \_id, + title and text. Example: + ``{"_id": "doc1", title: "relevant doc", "text": "relevant text"}`` + query_data_path (str): + The gcs query data which could be associated in train data. + The data path format is + ``gs:///``. A newline + delimited jsonl/ndjson file. 
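The new v1 ``ListCustomModels`` messages above map onto a single RPC on the search tuning service. A minimal sketch, assuming the ``SearchTuningServiceClient`` surface generated alongside these messages; the data store path is a placeholder:

.. code-block:: python

    from google.cloud import discoveryengine_v1

    def list_custom_models(data_store: str):
        # data_store is a placeholder, e.g.
        # projects/<project>/locations/global/collections/default_collection/
        #   dataStores/default_data_store
        client = discoveryengine_v1.SearchTuningServiceClient()
        response = client.list_custom_models(
            request=discoveryengine_v1.ListCustomModelsRequest(data_store=data_store)
        )
        for model in response.models:
            # CustomTuningModel carries the model name and its training state.
            print(model.name, model.model_state)
        return response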
+ + For search-tuning model, each line should have the \_id and + text. Example: {"_id": "query1", "text": "example query"} + train_data_path (str): + Cloud Storage training data path whose format should be + ``gs:///``. The file should + be in tsv format. Each line should have the doc_id and + query_id and score (number). + + For search-tuning model, it should have the query-id + corpus-id score as tsv file header. The score should be a + number in ``[0, inf+)``. The larger the number is, the more + relevant the pair is. Example: + + - ``query-id\tcorpus-id\tscore`` + - ``query1\tdoc1\t1`` + test_data_path (str): + Cloud Storage test data. Same format as train_data_path. If + not provided, a random 80/20 train/test split will be + performed on train_data_path. + """ + + corpus_data_path: str = proto.Field( + proto.STRING, + number=1, + ) + query_data_path: str = proto.Field( + proto.STRING, + number=2, + ) + train_data_path: str = proto.Field( + proto.STRING, + number=3, + ) + test_data_path: str = proto.Field( + proto.STRING, + number=4, + ) + + gcs_training_input: GcsTrainingInput = proto.Field( + proto.MESSAGE, + number=2, + oneof="training_input", + message=GcsTrainingInput, + ) + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + model_type: str = proto.Field( + proto.STRING, + number=3, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=4, + message=import_config.ImportErrorConfig, + ) + model_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class TrainCustomModelResponse(proto.Message): + r"""Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the data. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + Echoes the destination for the complete + errors in the request if set. + model_status (str): + The trained model status. Possible values are: + + - **bad-data**: The training data quality is bad. + - **no-improvement**: Tuning didn't improve performance. + Won't deploy. + - **in-progress**: Model training job creation is in + progress. + - **training**: Model is actively training. + - **evaluating**: The model is evaluating trained metrics. + - **indexing**: The model trained metrics are indexing. + - **ready**: The model is ready for serving. + metrics (MutableMapping[str, float]): + The metrics of the trained model. + model_name (str): + Fully qualified name of the + CustomTuningModel. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=2, + message=import_config.ImportErrorConfig, + ) + model_status: str = proto.Field( + proto.STRING, + number=3, + ) + metrics: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=4, + ) + model_name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class TrainCustomModelMetadata(proto.Message): + r"""Metadata related to the progress of the TrainCustomModel + operation. This is returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. 
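Tying the ``GcsTrainingInput`` paths and the response fields above together, a hedged sketch of starting a tuning job and waiting on the long-running operation; the bucket and file names are placeholders, and the files are assumed to follow the jsonl/tsv formats documented above:

.. code-block:: python

    from google.cloud import discoveryengine_v1

    def train_custom_model(data_store: str, bucket: str):
        client = discoveryengine_v1.SearchTuningServiceClient()
        request = discoveryengine_v1.TrainCustomModelRequest(
            data_store=data_store,
            model_type="search-tuning",
            gcs_training_input=discoveryengine_v1.TrainCustomModelRequest.GcsTrainingInput(
                corpus_data_path=f"gs://{bucket}/corpus.jsonl",
                query_data_path=f"gs://{bucket}/queries.jsonl",
                train_data_path=f"gs://{bucket}/train.tsv",
                # test_data_path is optional; an 80/20 split is made if omitted.
            ),
        )
        operation = client.train_custom_model(request=request)
        # Blocks until the TrainCustomModelResponse is available.
        response = operation.result()
        print(response.model_status, dict(response.metrics))
        return response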
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py index 450bc325a9de..65732fabdad7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py @@ -671,6 +671,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. """ id: str = proto.Field( @@ -697,6 +700,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py index 1c4059675d8a..0c6552f8e52c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py @@ -141,7 +141,7 @@ UpdateSessionRequest, ) from .types.custom_tuning_model import CustomTuningModel -from .types.data_store import DataStore, LanguageInfo +from .types.data_store import DataStore, LanguageInfo, WorkspaceConfig from .types.data_store_service import ( CreateDataStoreMetadata, CreateDataStoreRequest, @@ -157,6 +157,8 @@ from .types.document import Document, ProcessedDocument from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -378,6 +380,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -632,5 +636,6 @@ "UserEvent", "UserEventServiceClient", "UserInfo", + "WorkspaceConfig", "WriteUserEventRequest", ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json index d1a982467642..ef4a01c7f9ab 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json @@ -634,6 +634,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -679,6 +684,11 @@ "grpc-async": { "libraryClient": 
"DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -724,6 +734,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py index d27321c77e71..8d0e4ed5f010 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py @@ -272,7 +272,7 @@ async def update_acl_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> acl_config.AclConfig: - r"""Default Acl Configuration for use in a location of a + r"""Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py index 93ac988cd074..21cb6595333e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py @@ -693,7 +693,7 @@ def update_acl_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> acl_config.AclConfig: - r"""Default Acl Configuration for use in a location of a + r"""Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py index 1c655ad8b840..89945a11af67 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py @@ -242,7 +242,7 @@ def update_acl_config( ) -> Callable[[acl_config_service.UpdateAclConfigRequest], acl_config.AclConfig]: r"""Return a callable for the update acl config method over gRPC. - Default Acl Configuration for use in a location of a + Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py index c20e2dc0d50c..ca2d70a4a12f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py @@ -248,7 +248,7 @@ def update_acl_config( ]: r"""Return a callable for the update acl config method over gRPC. - Default Acl Configuration for use in a location of a + Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py index ceaa82413af3..c5be03187f84 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py @@ -1274,6 +1274,118 @@ async def sample_get_processed_document(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py index 4713addb0922..11e4241be964 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py @@ -1723,6 +1723,117 @@ def sample_get_processed_document(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py index 5acab8f79e13..6eb81a0622da 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py @@ -186,6 +186,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -277,6 +282,18 @@ def get_processed_document( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py index 462ae0dae998..5a71389bf9b1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py @@ -503,6 +503,37 @@ def get_processed_document( ) return self._stubs["get_processed_document"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py index 46ea26ca0ae2..6a794841145b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py @@ -519,6 +519,37 @@ def get_processed_document( ) return self._stubs["get_processed_document"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -571,6 +602,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py index e3eca6321b0d..23d578101bc4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. 
code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -145,6 +153,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -655,6 +688,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1426,6 +1558,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py index 3f73bf8fcdbc..caa4c390030b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py @@ -72,7 +72,7 @@ UpdateSessionRequest, ) from .custom_tuning_model import CustomTuningModel -from .data_store import DataStore, LanguageInfo +from .data_store import DataStore, LanguageInfo, WorkspaceConfig from .data_store_service import ( CreateDataStoreMetadata, CreateDataStoreRequest, @@ -88,6 +88,8 @@ from .document import Document, ProcessedDocument from .document_processing_config import DocumentProcessingConfig from .document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -337,6 +339,7 @@ "CustomTuningModel", "DataStore", "LanguageInfo", + "WorkspaceConfig", "CreateDataStoreMetadata", "CreateDataStoreRequest", "DeleteDataStoreMetadata", @@ -350,6 +353,8 @@ "Document", "ProcessedDocument", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py index aea3764d85fb..ef0d79a41a53 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py @@ -111,6 +111,12 @@ class AnswerSkippedReason(proto.Enum): For example, "Reply in the tone of a competing company's CEO". Google skips the answer if the query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -119,6 +125,7 @@ class AnswerSkippedReason(proto.Enum): POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py index 6f9d1b7ecb2c..c0972089ab3e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py @@ -154,7 +154,7 @@ class Control(proto.Message): associated_serving_config_ids (MutableSequence[str]): Output only. List of all [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] - ids this control is attached to. May take up to 10 minutes + IDs this control is attached to. May take up to 10 minutes to update after changes. 
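Stepping back to the REST transport changes earlier in this diff: the new ``pre_batch_get_documents_metadata`` / ``post_batch_get_documents_metadata`` hooks can be wired into a client by subclassing the interceptor. A hedged sketch; the logging bodies are illustrative and the import path is an assumption based on the file layout shown above:

.. code-block:: python

    import logging

    from google.cloud import discoveryengine_v1alpha
    from google.cloud.discoveryengine_v1alpha.services.document_service.transports.rest import (
        DocumentServiceRestInterceptor,
        DocumentServiceRestTransport,
    )


    class LoggingInterceptor(DocumentServiceRestInterceptor):
        def pre_batch_get_documents_metadata(self, request, metadata):
            logging.info("BatchGetDocumentsMetadata request: %s", request)
            return request, metadata

        def post_batch_get_documents_metadata(self, response):
            logging.info("received %d document(s)", len(response.documents_metadata))
            return response


    client = discoveryengine_v1alpha.DocumentServiceClient(
        transport=DocumentServiceRestTransport(interceptor=LoggingInterceptor()),
    )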
solution_type (google.cloud.discoveryengine_v1alpha.types.SolutionType): Required. Immutable. What solution the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py index ab529ab2e91f..5381115d74c7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py @@ -74,6 +74,9 @@ class ModelState(proto.Enum): NO_IMPROVEMENT (6): The model training finished successfully but metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. """ MODEL_STATE_UNSPECIFIED = 0 TRAINING_PAUSED = 1 @@ -82,6 +85,7 @@ class ModelState(proto.Enum): READY_FOR_SERVING = 4 TRAINING_FAILED = 5 NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py index 82b01be6e0e5..d5ef66749bbe 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py @@ -31,6 +31,7 @@ manifest={ "DataStore", "LanguageInfo", + "WorkspaceConfig", }, ) @@ -100,6 +101,12 @@ class DataStore(proto.Message): Currently ACL is only supported in ``GENERIC`` industry vertical with non-\ ``PUBLIC_WEBSITE`` content config. + workspace_config (google.cloud.discoveryengine_v1alpha.types.WorkspaceConfig): + Config to store data store type configuration for workspace + data. This must be set when + [DataStore.content_config][google.cloud.discoveryengine.v1alpha.DataStore.content_config] + is set as + [DataStore.ContentConfig.GOOGLE_WORKSPACE][google.cloud.discoveryengine.v1alpha.DataStore.ContentConfig.GOOGLE_WORKSPACE]. document_processing_config (google.cloud.discoveryengine_v1alpha.types.DocumentProcessingConfig): Configuration for Document understanding and enrichment. @@ -138,11 +145,16 @@ class ContentConfig(proto.Enum): PUBLIC_WEBSITE (3): The data store is used for public website search. + GOOGLE_WORKSPACE (4): + The data store is used for workspace search. Details of + workspace data store are specified in the + [WorkspaceConfig][google.cloud.discoveryengine.v1alpha.WorkspaceConfig]. """ CONTENT_CONFIG_UNSPECIFIED = 0 NO_CONTENT = 1 CONTENT_REQUIRED = 2 PUBLIC_WEBSITE = 3 + GOOGLE_WORKSPACE = 4 name: str = proto.Field( proto.STRING, @@ -190,6 +202,11 @@ class ContentConfig(proto.Enum): proto.BOOL, number=24, ) + workspace_config: "WorkspaceConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="WorkspaceConfig", + ) document_processing_config: gcd_document_processing_config.DocumentProcessingConfig = proto.Field( proto.MESSAGE, number=27, @@ -240,4 +257,57 @@ class LanguageInfo(proto.Message): ) +class WorkspaceConfig(proto.Message): + r"""Config to store data store type configuration for workspace + data + + Attributes: + type_ (google.cloud.discoveryengine_v1alpha.types.WorkspaceConfig.Type): + The Google Workspace data source. + dasher_customer_id (str): + Obfuscated Dasher customer ID. 
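A hedged sketch of how the new ``GOOGLE_WORKSPACE`` content config and ``WorkspaceConfig`` described above fit into data store creation (the ``Type`` enum values appear immediately below in this diff). The parent collection, customer ID, and the choice of Drive are placeholders:

.. code-block:: python

    from google.cloud import discoveryengine_v1alpha

    def create_workspace_data_store(parent: str, dasher_customer_id: str):
        # parent is a placeholder, e.g.
        # projects/<project>/locations/global/collections/default_collection
        client = discoveryengine_v1alpha.DataStoreServiceClient()
        data_store = discoveryengine_v1alpha.DataStore(
            display_name="workspace-drive-store",
            industry_vertical=discoveryengine_v1alpha.IndustryVertical.GENERIC,
            content_config=discoveryengine_v1alpha.DataStore.ContentConfig.GOOGLE_WORKSPACE,
            workspace_config=discoveryengine_v1alpha.WorkspaceConfig(
                type_=discoveryengine_v1alpha.WorkspaceConfig.Type.GOOGLE_DRIVE,
                dasher_customer_id=dasher_customer_id,
            ),
        )
        operation = client.create_data_store(
            request=discoveryengine_v1alpha.CreateDataStoreRequest(
                parent=parent,
                data_store=data_store,
                data_store_id="workspace-drive-store",
            )
        )
        return operation.result()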
+ """ + + class Type(proto.Enum): + r"""Specifies the type of Workspace App supported by this + DataStore + + Values: + TYPE_UNSPECIFIED (0): + Defaults to an unspecified Workspace type. + GOOGLE_DRIVE (1): + Workspace Data Store contains Drive data + GOOGLE_MAIL (2): + Workspace Data Store contains Mail data + GOOGLE_SITES (3): + Workspace Data Store contains Sites data + GOOGLE_CALENDAR (4): + Workspace Data Store contains Calendar data + GOOGLE_CHAT (5): + Workspace Data Store contains Chat data + GOOGLE_GROUPS (6): + Workspace Data Store contains Groups data + GOOGLE_KEEP (7): + Workspace Data Store contains Keep data + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_DRIVE = 1 + GOOGLE_MAIL = 2 + GOOGLE_SITES = 3 + GOOGLE_CALENDAR = 4 + GOOGLE_CHAT = 5 + GOOGLE_GROUPS = 6 + GOOGLE_KEEP = 7 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + dasher_customer_id: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py index 7792e6eb5a81..119f1b89d472 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1alpha.types import common @@ -96,6 +97,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. + index_status (google.cloud.discoveryengine_v1alpha.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. + - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -211,6 +220,31 @@ class AccessRestriction(proto.Message): message="Document.AclInfo.AccessRestriction", ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. If this field is populated, the + document is not indexed due to errors. 
+ """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -258,6 +292,11 @@ class AccessRestriction(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) class ProcessedDocument(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py index cee091f225bd..f1e063d05b9b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1alpha.types import document as gcd_document @@ -32,6 +33,8 @@ "UpdateDocumentRequest", "DeleteDocumentRequest", "GetProcessedDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -349,4 +352,163 @@ class ProcessedDocumentFormat(proto.Enum): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s by exact + uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Currently supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. 
+ """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + indexed. + NOT_IN_TARGET_SITE (2): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + NOT_IN_INDEX (3): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + not indexed. + """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + state (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. + last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1alpha.Document] + was last indexed. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + This field is a member of `oneof`_ ``matcher_value``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py index 40fa41fd061f..03ce6ee20fd3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py index 35e5e74f7660..7052c1e8850f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py @@ -129,10 +129,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -205,9 +205,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. 
The instance ID of the source @@ -428,9 +428,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the Cloud @@ -504,9 +504,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -564,9 +564,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py index 00e46651dd81..f1af3c79f881 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py @@ -1605,7 +1605,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1alpha.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. If redirect action is triggered, no search is performed, and only @@ -1876,6 +1877,18 @@ class SummarySkippedReason(proto.Enum): CEO". Only used when [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. + NON_SUMMARY_SEEKING_QUERY_IGNORED_V2 (9): + The non-answer seeking query ignored case. + + Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_answer_seeking_query] + is set to ``true``. """ SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1885,6 +1898,8 @@ class SummarySkippedReason(proto.Enum): LLM_ADDON_NOT_ENABLED = 5 NO_RELEVANT_CONTENT = 6 JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 + NON_SUMMARY_SEEKING_QUERY_IGNORED_V2 = 9 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence @@ -2169,6 +2184,9 @@ class StringConstraint(proto.Message): Values of the string field. The record will only be returned if the field value matches one of the values specified here. 
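The ``project_id`` docstring rewrites above cover the BigQuery, Spanner, Bigtable, Cloud SQL, and AlloyDB import sources. As one hedged example of the behaviour they describe (the project is inherited from the parent request when unset), a BigQuery import sketch with placeholder dataset and table names:

.. code-block:: python

    from google.cloud import discoveryengine_v1alpha

    def import_from_bigquery(parent: str, project_id: str):
        # parent is a placeholder branch resource name, e.g.
        # projects/<project>/locations/global/collections/default_collection/
        #   dataStores/<data_store>/branches/default_branch
        client = discoveryengine_v1alpha.DocumentServiceClient()
        request = discoveryengine_v1alpha.ImportDocumentsRequest(
            parent=parent,
            bigquery_source=discoveryengine_v1alpha.BigQuerySource(
                # Optional: if omitted, the project is inherited from `parent`.
                project_id=project_id,
                dataset_id="my_dataset",
                table_id="my_table",
                data_schema="custom",
            ),
            reconciliation_mode=(
                discoveryengine_v1alpha.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL
            ),
        )
        operation = client.import_documents(request=request)
        return operation.result()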
+ query_segment (str): + Identifies the keywords within the search + query that match a filter. """ field_name: str = proto.Field( @@ -2179,6 +2197,10 @@ class StringConstraint(proto.Message): proto.STRING, number=2, ) + query_segment: str = proto.Field( + proto.STRING, + number=3, + ) class NumberConstraint(proto.Message): r"""Constraint expression of a number field. Example: price < @@ -2195,6 +2217,9 @@ class NumberConstraint(proto.Message): value (float): The value specified in the numerical constraint. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ class Comparison(proto.Enum): @@ -2234,6 +2259,10 @@ class Comparison(proto.Enum): proto.DOUBLE, number=3, ) + query_segment: str = proto.Field( + proto.STRING, + number=4, + ) class GeolocationConstraint(proto.Message): r"""Constraint of a geolocation field. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py index 4deac76799fd..2162001f7b3b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py @@ -676,6 +676,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. """ id: str = proto.Field( @@ -702,6 +705,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py index 3bde65ad26b0..c469dab86a26 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py @@ -139,6 +139,8 @@ from .types.document import Document from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -214,6 +216,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, @@ -338,6 +341,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -497,6 +502,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json index 8afe7dc88cd2..02ac1aa251d2 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json @@ -506,6 +506,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -546,6 +551,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -586,6 +596,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py index 3cd99963bdee..a75f7fba7fec 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py @@ -1069,7 +1069,11 @@ async def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1146,6 +1150,118 @@ async def sample_purge_documents(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. 
Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py index 4dbf8a833298..696861d2050d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py @@ -1523,7 +1523,11 @@ def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1598,6 +1602,117 @@ def sample_purge_documents(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py index 6491783a2c52..8cb10eba46cf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py @@ -181,6 +181,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -263,6 +268,18 @@ def purge_documents( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py index 97a0a6923af9..5d1d8749c5d0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py @@ -473,6 +473,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py index d8dc444b89d1..cebef2f02dd5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py @@ -488,6 +488,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -535,6 +566,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py index db93f79c1cf2..2014752b11ae 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +145,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -616,6 +649,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1289,6 +1421,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py index 1392c0e5f3a6..cc7c07b55354 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py @@ -79,6 +79,8 @@ from .document import Document from .document_processing_config import DocumentProcessingConfig from .document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -154,6 +156,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, @@ -311,6 +314,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -379,6 +384,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py index 57d56b1b142f..1347bf5c6e59 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py @@ -105,6 +105,18 @@ class AnswerSkippedReason(proto.Enum): Google skips the answer if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (6): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing + company's CEO". Google skips the answer if the + query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. 
""" ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -112,6 +124,8 @@ class AnswerSkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 + JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. @@ -171,6 +185,10 @@ class Reference(proto.Message): chunk_info (google.cloud.discoveryengine_v1beta.types.Answer.Reference.ChunkInfo): Chunk information. + This field is a member of `oneof`_ ``content``. + structured_document_info (google.cloud.discoveryengine_v1beta.types.Answer.Reference.StructuredDocumentInfo): + Structured document information. + This field is a member of `oneof`_ ``content``. """ @@ -196,11 +214,22 @@ class UnstructuredDocumentInfo(proto.Message): class ChunkContent(proto.Message): r"""Chunk content. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: content (str): Chunk textual content. page_identifier (str): Page identifier. + relevance_score (float): + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. + + This field is a member of `oneof`_ ``_relevance_score``. """ content: str = proto.Field( @@ -211,6 +240,11 @@ class ChunkContent(proto.Message): proto.STRING, number=2, ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) document: str = proto.Field( proto.STRING, @@ -248,7 +282,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. document_metadata (google.cloud.discoveryengine_v1beta.types.Answer.Reference.ChunkInfo.DocumentMetadata): @@ -316,6 +355,26 @@ class DocumentMetadata(proto.Message): ) ) + class StructuredDocumentInfo(proto.Message): + r"""Structured search information. + + Attributes: + document (str): + Document resource name. + struct_data (google.protobuf.struct_pb2.Struct): + Structured search data. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + unstructured_document_info: "Answer.Reference.UnstructuredDocumentInfo" = ( proto.Field( proto.MESSAGE, @@ -330,6 +389,14 @@ class DocumentMetadata(proto.Message): oneof="content", message="Answer.Reference.ChunkInfo", ) + structured_document_info: "Answer.Reference.StructuredDocumentInfo" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="content", + message="Answer.Reference.StructuredDocumentInfo", + ) + ) class Step(proto.Message): r"""Step information. @@ -456,7 +523,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. 
It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. """ @@ -574,10 +646,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. """ TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 type_: "Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type" = ( proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py index 9fa4d299a928..f4a052314afd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py @@ -154,7 +154,7 @@ class Control(proto.Message): associated_serving_config_ids (MutableSequence[str]): Output only. List of all [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] - ids this control is attached to. May take up to 10 minutes + IDs this control is attached to. May take up to 10 minutes to update after changes. solution_type (google.cloud.discoveryengine_v1beta.types.SolutionType): Required. Immutable. What solution the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py index 9ddc57efdf78..e09b08bc0e50 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py @@ -924,10 +924,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. """ TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 types: MutableSequence[ "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py index 257280080b65..d0f53427c220 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py @@ -74,6 +74,9 @@ class ModelState(proto.Enum): NO_IMPROVEMENT (6): The model training finished successfully but metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. 
""" MODEL_STATE_UNSPECIFIED = 0 TRAINING_PAUSED = 1 @@ -82,6 +85,7 @@ class ModelState(proto.Enum): READY_FOR_SERVING = 4 TRAINING_FAILED = 5 NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py index 00619f5b456e..b09bd5a4a0f1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -91,6 +92,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. + index_status (google.cloud.discoveryengine_v1beta.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. + - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -154,6 +163,31 @@ class Content(proto.Message): number=1, ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -196,6 +230,11 @@ class Content(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py index ee06ee0d8a47..2192893da7b4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py @@ -62,6 +62,8 @@ class DocumentProcessingConfig(proto.Message): digital parsing and layout parsing are supported. - ``pptx``: Override parsing config for PPTX files, only digital parsing and layout parsing are supported. + - ``xlsx``: Override parsing config for XLSX files, only + digital parsing and layout parsing are supported. 
""" class ChunkingConfig(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py index 52f0c0f67f71..39c4a3a68d11 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -31,6 +32,8 @@ "CreateDocumentRequest", "UpdateDocumentRequest", "DeleteDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -271,4 +274,174 @@ class DeleteDocumentRequest(proto.Message): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s. + """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s by exact + uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s. Currently + supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. + """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1beta.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is indexed. 
+ NOT_IN_TARGET_SITE (2): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + NOT_IN_INDEX (3): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is not indexed. + """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1beta.Document]. + state (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. + last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1beta.Document] was + last indexed. + data_ingestion_source (str): + The data ingestion source of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Allowed values are: + + - ``batch``: Data ingested via Batch API, e.g., + ImportDocuments. + - ``streaming`` Data ingested via Streaming API, e.g., FHIR + streaming. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + This field is a member of `oneof`_ ``matcher_value``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data_ingestion_source: str = proto.Field( + proto.STRING, + number=5, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py index 3734ca3d5c68..cbdefdb249b3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. 
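Not part of the generated patch: a minimal usage sketch for the new ``BatchGetDocumentsMetadata`` surface added above, built only from the request and response types in this change; the parent branch name and the URIs are placeholders.

.. code-block:: python

    from google.cloud import discoveryengine_v1beta

    def print_index_freshness(parent: str, uris: list) -> None:
        # parent is a placeholder branch resource name, e.g.
        # projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}
        client = discoveryengine_v1beta.DocumentServiceClient()

        request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest(
            parent=parent,
            matcher=discoveryengine_v1beta.BatchGetDocumentsMetadataRequest.Matcher(
                uris_matcher=discoveryengine_v1beta.BatchGetDocumentsMetadataRequest.UrisMatcher(
                    uris=uris,
                ),
            ),
        )
        response = client.batch_get_documents_metadata(request=request)

        for doc_metadata in response.documents_metadata:
            # state is INDEXED, NOT_IN_TARGET_SITE, or NOT_IN_INDEX.
            print(
                doc_metadata.matcher_value.uri,
                doc_metadata.state,
                doc_metadata.last_refreshed_time,
            )
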
In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py index 40e7225633b5..b4cb57eb641b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py @@ -129,10 +129,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -205,9 +205,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -428,9 +428,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the Cloud @@ -478,6 +478,11 @@ class FhirStoreSource(proto.Message): characters. Can be specified if one wants to have the FhirStore export to a specific Cloud Storage directory. + resource_types (MutableSequence[str]): + The FHIR resource types to import. The resource types should + be a subset of all `supported FHIR resource + types `__. + Default to all supported FHIR resource types if empty. """ fhir_store: str = proto.Field( @@ -488,6 +493,10 @@ class FhirStoreSource(proto.Message): proto.STRING, number=2, ) + resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CloudSqlSource(proto.Message): @@ -495,9 +504,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. 
If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -555,9 +564,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py index 5a7d3c02f8af..829032161fdd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py @@ -21,12 +21,15 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore +from google.cloud.discoveryengine_v1beta.types import import_config + __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1beta", manifest={ "PurgeUserEventsRequest", "PurgeUserEventsResponse", "PurgeUserEventsMetadata", + "PurgeErrorConfig", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", @@ -151,24 +154,100 @@ class PurgeUserEventsMetadata(proto.Message): ) +class PurgeErrorConfig(proto.Message): + r"""Configuration of destination for Purge related errors. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_prefix (str): + Cloud Storage prefix for purge errors. This must be an + empty, existing Cloud Storage directory. Purge errors are + written to sharded files in this directory, one per line, as + a JSON-encoded ``google.rpc.Status`` message. + + This field is a member of `oneof`_ ``destination``. + """ + + gcs_prefix: str = proto.Field( + proto.STRING, + number=1, + oneof="destination", + ) + + class PurgeDocumentsRequest(proto.Message): r"""Request message for [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] method. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + gcs_source (google.cloud.discoveryengine_v1beta.types.GcsSource): + Cloud Storage location for the input content. Supported + ``data_schema``: + + - ``document_id``: One valid + [Document.id][google.cloud.discoveryengine.v1beta.Document.id] + per line. + + This field is a member of `oneof`_ ``source``. + inline_source (google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest.InlineSource): + Inline source for the input content for + purge. + + This field is a member of `oneof`_ ``source``. parent (str): Required. The parent resource name, such as ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. filter (str): Required. Filter matching documents to purge. Only currently supported value is ``*`` (all items). 
+ error_config (google.cloud.discoveryengine_v1beta.types.PurgeErrorConfig): + The desired location of errors incurred + during the purge. force (bool): Actually performs the purge. If ``force`` is set to false, return the expected purge count without deleting any documents. """ + class InlineSource(proto.Message): + r"""The inline source for the input config for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. + + Attributes: + documents (MutableSequence[str]): + Required. A list of full resource name of documents to + purge. In the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + Recommended max of 100 items. + """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + gcs_source: import_config.GcsSource = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=import_config.GcsSource, + ) + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=InlineSource, + ) parent: str = proto.Field( proto.STRING, number=1, @@ -177,6 +256,11 @@ class PurgeDocumentsRequest(proto.Message): proto.STRING, number=2, ) + error_config: "PurgeErrorConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PurgeErrorConfig", + ) force: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py index 710ff4c38cba..8db64de1bb12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py @@ -139,9 +139,12 @@ class SearchRequest(proto.Message): object. Leave it unset if ordered by relevance. ``order_by`` expression is case-sensitive. - For more information on ordering for retail search, see - `Ordering `__ - + For more information on ordering the website search results, + see `Order web search + results `__. + For more information on ordering the healthcare search + results, see `Order healthcare search + results `__. If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. user_info (google.cloud.discoveryengine_v1beta.types.UserInfo): @@ -329,8 +332,39 @@ class SearchRequest(proto.Message): Session specification. Can be used only when ``session`` is set. + relevance_threshold (google.cloud.discoveryengine_v1beta.types.SearchRequest.RelevanceThreshold): + The relevance threshold of the search + results. + Default to Google defined threshold, leveraging + a balance of precision and recall to deliver + both highly accurate results and comprehensive + coverage of relevant information. """ + class RelevanceThreshold(proto.Enum): + r"""The relevance threshold of the search results. The higher + relevance threshold is, the higher relevant results are shown + and the less number of results are returned. + + Values: + RELEVANCE_THRESHOLD_UNSPECIFIED (0): + Default value. In this case, server behavior + defaults to Google defined threshold. + LOWEST (1): + Lowest relevance threshold. + LOW (2): + Low relevance threshold. + MEDIUM (3): + Medium relevance threshold. + HIGH (4): + High relevance threshold. 
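Not part of the generated patch: a minimal sketch of the extended ``PurgeDocumentsRequest`` above, combining ``gcs_source`` with the new ``PurgeErrorConfig``; the Cloud Storage URIs are placeholders and the long-running-operation handling assumes the standard GAPIC pattern.

.. code-block:: python

    from google.cloud import discoveryengine_v1beta

    def purge_documents_from_gcs(parent: str, gcs_uri: str, error_prefix: str) -> None:
        # parent, gcs_uri and error_prefix are placeholders: gcs_uri points to a file
        # of Document IDs (data_schema "document_id"), error_prefix to an empty Cloud
        # Storage directory that receives purge errors.
        client = discoveryengine_v1beta.DocumentServiceClient()

        request = discoveryengine_v1beta.PurgeDocumentsRequest(
            parent=parent,
            filter="*",  # only "*" is currently supported
            gcs_source=discoveryengine_v1beta.GcsSource(
                input_uris=[gcs_uri],
                data_schema="document_id",
            ),
            error_config=discoveryengine_v1beta.PurgeErrorConfig(gcs_prefix=error_prefix),
            force=False,  # dry run: report the expected purge count without deleting
        )

        operation = client.purge_documents(request=request)
        print(operation.result())
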
+ """ + RELEVANCE_THRESHOLD_UNSPECIFIED = 0 + LOWEST = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + class ImageQuery(proto.Message): r"""Specifies the image query input. @@ -980,6 +1014,14 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. The default value is ``false``. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true``, only queries with high relevance search results + will generate answers. model_prompt_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): If specified, the spec will be used to modify the prompt provided to the LLM. @@ -1057,6 +1099,10 @@ class ModelSpec(proto.Message): proto.BOOL, number=4, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=9, + ) model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( proto.MESSAGE, number=5, @@ -1519,6 +1565,11 @@ class SessionSpec(proto.Message): number=42, message=SessionSpec, ) + relevance_threshold: RelevanceThreshold = proto.Field( + proto.ENUM, + number=44, + enum=RelevanceThreshold, + ) class SearchResponse(proto.Message): @@ -1545,7 +1596,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. If redirect action is triggered, no search is performed, and only @@ -1585,6 +1637,9 @@ class SearchResponse(proto.Message): Only set if [SearchRequest.session][google.cloud.discoveryengine.v1beta.SearchRequest.session] is provided. See its description for more details. + one_box_results (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.OneBoxResult]): + A list of One Box results. There can be + multiple One Box results of different types. """ class SearchResult(proto.Message): @@ -1773,13 +1828,13 @@ class SummarySkippedReason(proto.Enum): ADVERSARIAL_QUERY_IGNORED (1): The adversarial query ignored case. - Only populated when + Only used when [SummarySpec.ignore_adversarial_query][google.cloud.discoveryengine.v1beta.SearchRequest.ContentSearchSpec.SummarySpec.ignore_adversarial_query] is set to ``true``. NON_SUMMARY_SEEKING_QUERY_IGNORED (2): The non-summary seeking query ignored case. - Only populated when + Only used when [SummarySpec.ignore_non_summary_seeking_query][google.cloud.discoveryengine.v1beta.SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_summary_seeking_query] is set to ``true``. OUT_OF_DOMAIN_QUERY_IGNORED (3): @@ -1806,6 +1861,19 @@ class SummarySkippedReason(proto.Enum): Google skips the summary if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (7): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing company's + CEO". Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] + is set to ``true``. 
+ CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1814,6 +1882,8 @@ class SummarySkippedReason(proto.Enum): POTENTIAL_POLICY_VIOLATION = 4 LLM_ADDON_NOT_ENABLED = 5 NO_RELEVANT_CONTENT = 6 + JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence @@ -2098,6 +2168,9 @@ class StringConstraint(proto.Message): Values of the string field. The record will only be returned if the field value matches one of the values specified here. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ field_name: str = proto.Field( @@ -2108,6 +2181,10 @@ class StringConstraint(proto.Message): proto.STRING, number=2, ) + query_segment: str = proto.Field( + proto.STRING, + number=3, + ) class NumberConstraint(proto.Message): r"""Constraint expression of a number field. Example: price < @@ -2124,6 +2201,9 @@ class NumberConstraint(proto.Message): value (float): The value specified in the numerical constraint. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ class Comparison(proto.Enum): @@ -2163,6 +2243,10 @@ class Comparison(proto.Enum): proto.DOUBLE, number=3, ) + query_segment: str = proto.Field( + proto.STRING, + number=4, + ) class GeolocationConstraint(proto.Message): r"""Constraint of a geolocation field. @@ -2177,6 +2261,12 @@ class GeolocationConstraint(proto.Message): the input query. The proximity of the reference address to the geolocation field will be used to filter the results. + latitude (float): + The latitude of the geolocation inferred from + the input query. + longitude (float): + The longitude of the geolocation inferred + from the input query. radius_in_meters (float): The radius in meters around the address. The record is returned if the location of the @@ -2191,6 +2281,14 @@ class GeolocationConstraint(proto.Message): proto.STRING, number=2, ) + latitude: float = proto.Field( + proto.DOUBLE, + number=4, + ) + longitude: float = proto.Field( + proto.DOUBLE, + number=5, + ) radius_in_meters: float = proto.Field( proto.FLOAT, number=3, @@ -2344,6 +2442,48 @@ class SessionInfo(proto.Message): number=2, ) + class OneBoxResult(proto.Message): + r"""OneBoxResult is a holder for all results of specific type + that we want to display in UI differently. + + Attributes: + one_box_type (google.cloud.discoveryengine_v1beta.types.SearchResponse.OneBoxResult.OneBoxType): + The type of One Box result. + search_results (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.SearchResult]): + The search results for this One Box. + """ + + class OneBoxType(proto.Enum): + r"""The type of One Box result. + + Values: + ONE_BOX_TYPE_UNSPECIFIED (0): + Default value. Should not be used. + PEOPLE (1): + One Box result contains people results. + ORGANIZATION (2): + One Box result contains organization results. + SLACK (3): + One Box result contains slack results. 
+ """ + ONE_BOX_TYPE_UNSPECIFIED = 0 + PEOPLE = 1 + ORGANIZATION = 2 + SLACK = 3 + + one_box_type: "SearchResponse.OneBoxResult.OneBoxType" = proto.Field( + proto.ENUM, + number=1, + enum="SearchResponse.OneBoxResult.OneBoxType", + ) + search_results: MutableSequence[ + "SearchResponse.SearchResult" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="SearchResponse.SearchResult", + ) + @property def raw_page(self): return self @@ -2414,6 +2554,11 @@ def raw_page(self): number=19, message=SessionInfo, ) + one_box_results: MutableSequence[OneBoxResult] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=OneBoxResult, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py index 23500fae2a57..5c959d423932 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py @@ -673,6 +673,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. """ id: str = proto.Field( @@ -699,6 +702,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..ff9157e921d6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..84861f6a63ac --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py index dded80300569..7e27f4affa4f 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py @@ -39,7 +39,11 @@ async def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py index d0d2932cf202..d051616047f7 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py @@ -39,7 +39,11 @@ def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py new file mode 100644 index 000000000000..9d7a80ef3114 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py new file mode 100644 index 000000000000..faedb982f00e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py new file mode 100644 index 000000000000..ba97a5e74862 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py new file mode 100644 index 000000000000..422d9bcdcc6d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py new file mode 100644 index 000000000000..13cc76da400c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py new file mode 100644 index 000000000000..eeab6a2ea2b3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..8a98d99976c6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..972a126db36b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..37e8933b085d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..2164801056cc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py index 204de42ab390..d57fdf0e3be2 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py @@ -39,7 +39,11 @@ async def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py index d4538c05c707..6819052d64c8 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py @@ -39,7 +39,11 @@ def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 69944fc7ce8a..7c8ee0acc6ee 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -4541,6 +4541,167 @@ ], "title": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + 
"shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5393,12 +5554,12 @@ "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5408,18 +5569,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -5469,12 +5630,12 @@ "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5484,18 +5645,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -8090,22 +8251,22 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", - "shortName": "SiteSearchEngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient.list_custom_models", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchCreateTargetSites" + "shortName": "ListCustomModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.ListCustomModelsRequest" }, { "name": "retry", @@ -8120,22 +8281,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_create_target_sites" + "resultType": "google.cloud.discoveryengine_v1.types.ListCustomModelsResponse", + "shortName": "list_custom_models" }, - "description": "Sample for BatchCreateTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py", + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -8145,43 +8306,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py" + "title": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", - "shortName": "SiteSearchEngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_create_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient.list_custom_models", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchCreateTargetSites" + "shortName": "ListCustomModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.ListCustomModelsRequest" }, { "name": "retry", @@ -8196,22 +8357,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_create_target_sites" + "resultType": "google.cloud.discoveryengine_v1.types.ListCustomModelsResponse", + "shortName": "list_custom_models" }, - "description": "Sample for BatchCreateTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -8221,44 +8382,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py" + "title": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", - "shortName": "SiteSearchEngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + 
"fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient.train_custom_model", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchVerifyTargetSites" + "shortName": "TrainCustomModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.TrainCustomModelRequest" }, { "name": "retry", @@ -8274,13 +8435,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_verify_target_sites" + "shortName": "train_custom_model" }, - "description": "Sample for BatchVerifyTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async", "segments": [ { "end": 55, @@ -8313,28 +8474,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py" + "title": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", - "shortName": "SiteSearchEngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_verify_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient.train_custom_model", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchVerifyTargetSites" + "shortName": "TrainCustomModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.TrainCustomModelRequest" }, { "name": "retry", @@ -8350,13 +8511,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "batch_verify_target_sites" + "shortName": "train_custom_model" }, - "description": "Sample for BatchVerifyTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync", "segments": [ { "end": 55, @@ -8389,7 +8550,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + "title": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py" }, { "canonical": true, @@ -8399,14 +8560,320 @@ "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.create_target_site", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", "service": { "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", "shortName": "SiteSearchEngineService" }, - "shortName": "CreateTargetSite" + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_create_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for 
BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.create_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "CreateTargetSite" }, "parameters": [ { @@ -10283,6 +10750,159 @@ ], "title": "discoveryengine_v1_generated_user_event_service_import_user_events_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient", + "shortName": "UserEventServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient.purge_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.PurgeUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "PurgeUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_user_events" + }, + "description": "Sample for PurgeUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_purge_user_events_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_purge_user_events_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient", + 
"shortName": "UserEventServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient.purge_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.PurgeUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "PurgeUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_user_events" + }, + "description": "Sample for PurgeUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index 2884e903e0cf..4d014b08daf6 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -5507,6 +5507,167 @@ ], "title": "discoveryengine_v1alpha_generated_data_store_service_update_document_processing_config_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": 
"discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index cff39c47dc30..c8efdd7d5106 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -4541,6 +4541,167 @@ ], "title": "discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": 
"DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5393,12 +5554,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5408,18 +5569,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -5469,12 +5630,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5484,18 +5645,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py index c9797094604f..b89b5e57f538 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), @@ -48,7 +49,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', 'boost_spec', ), 'create_control': ('parent', 'control', 'control_id', ), 'create_conversation': ('parent', 'conversation', ), - 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), + 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', 'skip_default_schema_creation', ), 'create_document': ('parent', 'document', 'document_id', ), 'create_engine': ('parent', 'engine', 'engine_id', ), 'create_schema': ('parent', 'schema', 'schema_id', ), @@ -81,6 +82,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), 'list_controls': ('parent', 'page_size', 'page_token', 'filter', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_custom_models': ('data_store', ), 'list_data_stores': ('parent', 'page_size', 'page_token', 'filter', ), 'list_documents': ('parent', 'page_size', 
'page_token', ), 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), @@ -89,12 +91,14 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'list_target_sites': ('parent', 'page_size', 'page_token', ), 'provision_project': ('name', 'accept_data_use_terms', 'data_use_terms_version', ), 'purge_completion_suggestions': ('parent', ), - 'purge_documents': ('parent', 'filter', 'force', ), + 'purge_documents': ('parent', 'filter', 'gcs_source', 'inline_source', 'error_config', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), + 'purge_user_events': ('parent', 'filter', 'force', ), 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', 'search_as_you_type_spec', 'session', 'session_spec', ), + 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'update_control': ('control', 'update_mask', ), 'update_conversation': ('conversation', 'update_mask', ), 'update_data_store': ('data_store', 'update_mask', ), diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py index e10372b3b1fa..d82dccd4ad51 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py index 8a4765d046fe..f2f74d58b3d5 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 
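# ---------------------------------------------------------------------------
# Editorial aside (not part of the generated patch): the new
# 'batch_get_documents_metadata' mapping added above pairs the RPC with its
# 'parent' and 'matcher' request fields. Below is a minimal, hypothetical
# usage sketch for the v1beta surface, assuming the request/response types
# are re-exported from google.cloud.discoveryengine_v1beta as is usual for
# GAPIC-generated packages; the function name and resource value are
# placeholders, not part of this change.
from google.cloud import discoveryengine_v1beta


def batch_get_documents_metadata_sample(parent: str) -> None:
    """Fetch index-freshness metadata for the documents under a branch."""
    client = discoveryengine_v1beta.DocumentServiceClient()
    # The snippet metadata in this patch lists 'parent' as a flattened
    # argument, so the request object can be omitted for the simple case.
    response = client.batch_get_documents_metadata(parent=parent)
    print(response)
# ---------------------------------------------------------------------------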
'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), @@ -106,14 +107,14 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'pause_engine': ('name', ), 'provision_project': ('name', 'accept_data_use_terms', 'data_use_terms_version', ), 'purge_completion_suggestions': ('parent', ), - 'purge_documents': ('parent', 'filter', 'force', ), + 'purge_documents': ('parent', 'filter', 'gcs_source', 'inline_source', 'error_config', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), 'purge_user_events': ('parent', 'filter', 'force', ), 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'resume_engine': ('name', ), - 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'region_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', 'natural_language_query_understanding_spec', 'search_as_you_type_spec', 'session', 'session_spec', ), + 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'region_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', 'natural_language_query_understanding_spec', 'search_as_you_type_spec', 'session', 'session_spec', 'relevance_threshold', ), 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'tune_engine': ('name', ), 'update_control': ('control', 'update_mask', ), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py index 9459a96eceda..c3bd0fd5703a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py @@ -3556,6 +3556,7 @@ def test_create_data_store_rest_required_fields( ( "create_advanced_site_search", "data_store_id", + "skip_default_schema_creation", ) ) jsonified_request.update(unset_fields) @@ -3622,6 +3623,7 @@ def test_create_data_store_rest_unset_required_fields(): ( "createAdvancedSiteSearch", "dataStoreId", + "skipDefaultSchemaCreation", ) ) & set( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py index 207850afb022..2ad97529f22b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py @@ -45,10 
+45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -3911,6 +3913,387 @@ async def test_purge_documents_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3918,7 +4301,321 @@ async def test_purge_documents_field_headers_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3926,41 +4623,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3974,35 +4664,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4013,21 +4703,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4036,7 +4733,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4057,30 +4754,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4093,14 +4798,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4112,17 +4817,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4134,8 +4841,8 @@ def test_get_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -4144,7 +4851,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4157,10 +4864,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4169,16 +4876,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4186,25 +4893,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4213,61 +4920,224 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_get_document_rest_error(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - document_service.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4281,35 +5151,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4317,31 +5188,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4350,7 +5222,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4362,47 +5234,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4415,14 +5295,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4434,19 +5314,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ 
-4458,8 +5338,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4481,10 +5361,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4493,7 +5373,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4503,6 +5383,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4510,12 +5400,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4528,94 +5418,45 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), - parent="parent_value", - ) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_documents(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4623,12 +5464,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -4639,13 +5482,28 @@ def test_create_document_rest(request_type): "parent_document_id": "parent_document_id_value", "derived_struct_data": {}, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + 
{ + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -4729,7 +5587,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. assert isinstance(response, gcd_document.Document) @@ -4739,7 +5597,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4753,36 +5611,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4790,32 +5646,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4836,7 +5687,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4852,39 +5703,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) 
-def test_create_document_rest_interceptors(null_interceptor): +def test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4897,14 +5741,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4920,7 +5764,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4928,7 +5772,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -4940,8 +5784,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4950,7 +5794,9 @@ def test_create_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -4963,10 +5809,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4979,12 +5825,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -4994,7 +5841,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5007,20 +5854,20 @@ def 
test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5029,9 +5876,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5041,150 +5887,54 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - document_service.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "index_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5198,34 +5948,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5236,24 +5987,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,7 +6010,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5274,48 +6022,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5328,14 +6064,11 @@ def test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5347,19 +6080,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5368,11 +6097,10 @@ def test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", 
request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5381,9 +6109,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5396,10 +6122,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5408,53 +6134,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } # get truthy value for each flattened field mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5463,22 +6176,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5487,11 +6191,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5499,29 +6203,29 @@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5535,35 +6239,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5574,21 +6284,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5597,7 +6307,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
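# ---------------------------------------------------------------------------
# Illustrative aside (not part of the generated patch): the required-fields test
# above pins down that ImportDocuments needs the `parent` branch resource, and
# the mocked Operation stands in for the long-running operation the method
# returns. A minimal caller-side sketch, assuming real credentials and an
# existing data store; the resource IDs are placeholders and the document
# source configuration is deliberately omitted.
from google.cloud import discoveryengine_v1


def import_documents_sketch() -> None:
    client = discoveryengine_v1.DocumentServiceClient()
    request = discoveryengine_v1.ImportDocumentsRequest(
        # Required field exercised by test_import_documents_rest_required_fields.
        parent=(
            "projects/my-project/locations/global/dataStores/my-store"
            "/branches/default_branch"
        ),
        # A real import also needs a document source (e.g. a Cloud Storage or
        # inline source); that part is an assumption left out of this sketch.
    )
    operation = client.import_documents(request=request)
    operation.result(timeout=300)  # block until the LRO finishes or raises
# ---------------------------------------------------------------------------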
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5609,36 +6319,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5651,11 +6362,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5667,15 +6383,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -5684,10 +6404,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -5696,7 +6417,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -5709,67 +6430,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5778,11 +6442,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5806,13 +6470,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5826,19 +6490,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -5847,20 +6509,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5871,21 +6534,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5919,24 +6585,32 @@ def test_import_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def 
test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5951,14 +6625,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5974,7 +6648,7 @@ def test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5982,7 +6656,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -5994,8 +6668,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6017,10 +6691,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6029,11 +6703,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6048,22 +6722,26 @@ def test_purge_documents_rest(request_type): # Mock the http request call within the method and fake a response. 
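# ---------------------------------------------------------------------------
# Illustrative aside (not part of the generated patch): the unset-required-fields
# check above asserts that PurgeDocuments requires both `parent` and `filter`.
# A minimal caller-side sketch with placeholder IDs; using "*" as a match-all
# filter is an assumption about the service, not something this patch asserts.
from google.cloud import discoveryengine_v1


def purge_documents_sketch() -> None:
    client = discoveryengine_v1.DocumentServiceClient()
    request = discoveryengine_v1.PurgeDocumentsRequest(
        parent=(
            "projects/my-project/locations/global/dataStores/my-store"
            "/branches/default_branch"
        ),
        filter="*",
    )
    # Like ImportDocuments, this returns a long-running operation.
    client.purge_documents(request=request).result(timeout=300)
# ---------------------------------------------------------------------------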
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6077,40 +6755,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc request = {} - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. 
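# ---------------------------------------------------------------------------
# Illustrative aside (not part of the generated patch): the
# *_use_cached_wrapped_rpc tests assert that a method wrapper is built once via
# _prep_wrapped_messages and then reused, rather than re-wrapped on every call.
# The toy below is a from-scratch analogue of that property, not the transport's
# actual implementation.
from typing import Callable, Dict


class ToyTransport:
    """Caches one wrapped callable per RPC name."""

    def __init__(self) -> None:
        self._wrapped: Dict[str, Callable[..., object]] = {}
        self.wrap_count = 0  # how many times the expensive "wrap" step ran

    def wrapped(self, name: str, rpc: Callable[..., object]) -> Callable[..., object]:
        if name not in self._wrapped:
            self.wrap_count += 1        # analogous to wrap_method being invoked
            self._wrapped[name] = rpc   # a real wrapper would add retry/timeout
        return self._wrapped[name]


_toy = ToyTransport()
_toy.wrapped("batch_get_documents_metadata", lambda request: None)(None)
_toy.wrapped("batch_get_documents_metadata", lambda request: None)(None)
assert _toy.wrap_count == 1  # wrapped once, reused on the second call
# ---------------------------------------------------------------------------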
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6121,24 +6799,23 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6147,7 +6824,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6159,45 +6836,49 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("matcher",)) & set( ( "parent", - "filter", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6210,16 +6891,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6231,19 +6910,21 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) ) - request = purge_config.PurgeDocumentsRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.purge_documents( + 
client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6255,8 +6936,9 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6278,10 +6960,71 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.batch_get_documents_metadata(request) -def test_purge_documents_rest_error(): +def test_batch_get_documents_metadata_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.BatchGetDocumentsMetadataResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_get_documents_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" + % client.transport._host, + args[1], + ) + + +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
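# ---------------------------------------------------------------------------
# Illustrative aside (not part of the generated patch): the flattened test above
# shows that callers may pass `parent` directly instead of building a
# BatchGetDocumentsMetadataRequest, and the *_flattened_error test shows that
# mixing both styles raises ValueError. The required-fields test also flags
# `matcher` as required, so a real request needs one; it is omitted from this
# placeholder-only sketch.
from google.cloud import discoveryengine_v1


def batch_get_documents_metadata_sketch() -> None:
    client = discoveryengine_v1.DocumentServiceClient()
    response = client.batch_get_documents_metadata(
        parent=(
            "projects/my-project/locations/global/dataStores/my-store"
            "/branches/default_branch"
        ),
    )
    # The call returns a BatchGetDocumentsMetadataResponse message.
    print(type(response).__name__)
# ---------------------------------------------------------------------------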
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6433,6 +7176,7 @@ def test_document_service_base_transport(): "delete_document", "import_documents", "purge_documents", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -6737,6 +7481,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.purge_documents._session session2 = client2.transport.purge_documents._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py new file mode 100644 index 000000000000..7f47ed810349 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py @@ -0,0 +1,3772 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
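# ---------------------------------------------------------------------------
# Illustrative aside (not part of the generated patch): like the other generated
# suites in this change, the SearchTuningService tests below construct clients
# with anonymous credentials and an explicit transport so that no ADC lookup or
# network traffic is involved; HTTP and gRPC calls are replaced with mocks. A
# minimal standalone version of that hermetic setup:
from google.auth import credentials as ga_credentials
from google.cloud.discoveryengine_v1.services.search_tuning_service import (
    SearchTuningServiceClient,
)


def make_offline_client() -> SearchTuningServiceClient:
    # AnonymousCredentials never contact a token endpoint, which keeps unit
    # tests hermetic and fast.
    return SearchTuningServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
# ---------------------------------------------------------------------------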
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + import_config, + search_tuning_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SearchTuningServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SearchTuningServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SearchTuningServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SearchTuningServiceClient._get_client_cert_source(None, False) is None + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + 
"google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SearchTuningServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + default_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SearchTuningServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SearchTuningServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SearchTuningServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SearchTuningServiceClient._get_universe_domain(None, None) + == SearchTuningServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SearchTuningServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SearchTuningServiceGrpcTransport, "grpc"), + (transports.SearchTuningServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SearchTuningServiceRestTransport, "rest"), + ], +) +def test_search_tuning_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com/" + ) + + +def test_search_tuning_service_client_get_transport_class(): + transport = SearchTuningServiceClient.get_transport_class() + available_transports = [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceRestTransport, + ] + assert transport in available_transports + + transport = SearchTuningServiceClient.get_transport_class("grpc") + assert transport == transports.SearchTuningServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "true", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "false", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "true", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_search_tuning_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SearchTuningServiceClient, SearchTuningServiceAsyncClient] +) +@mock.patch.object( + SearchTuningServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
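# ---------------------------------------------------------------------------
# Illustrative aside (not part of the generated patch): the tests around this
# point pin down how the endpoint and client certificate are resolved. An
# explicit ClientOptions.api_endpoint always wins; otherwise
# GOOGLE_API_USE_MTLS_ENDPOINT ("never" / "always" / "auto") decides, and in
# "auto" mode GOOGLE_API_USE_CLIENT_CERTIFICATE plus an available client cert
# select the mTLS endpoint. A small sketch that queries that logic directly:
import os
from unittest import mock

from google.cloud.discoveryengine_v1.services.search_tuning_service import (
    SearchTuningServiceClient,
)

with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
    endpoint, cert_source = (
        SearchTuningServiceClient.get_mtls_endpoint_and_cert_source()
    )
    assert endpoint == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT
    assert cert_source is None  # no client certificate was configured
# ---------------------------------------------------------------------------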
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SearchTuningServiceClient, SearchTuningServiceAsyncClient] +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + default_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
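# --- Editor's sketch (not part of the generated diff) ---------------------------------
# The endpoint precedence this test walks through, condensed into a plain function: an
# explicit ClientOptions.api_endpoint always wins; otherwise
# GOOGLE_API_USE_MTLS_ENDPOINT="always" forces the mTLS endpoint; otherwise the default
# endpoint template is filled with the (possibly non-default) universe domain. This
# mirrors the assertions around it and is not the client's internal code.
from typing import Optional

_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}"
_MTLS_ENDPOINT = "discoveryengine.mtls.googleapis.com"

def choose_api_endpoint(
    api_endpoint_override: Optional[str] = None,
    use_mtls_endpoint: str = "auto",   # value of GOOGLE_API_USE_MTLS_ENDPOINT
    universe_domain: str = "googleapis.com",
) -> str:
    if api_endpoint_override:
        return api_endpoint_override
    if use_mtls_endpoint == "always":
        return _MTLS_ENDPOINT
    return _ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)

assert choose_api_endpoint("foo.com") == "foo.com"
assert choose_api_endpoint(use_mtls_endpoint="always") == _MTLS_ENDPOINT
assert choose_api_endpoint(universe_domain="bar.com") == "discoveryengine.bar.com"
# ---------------------------------------------------------------------------------------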
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +def test_search_tuning_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + None, + ), + ], +) +def test_search_tuning_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
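# --- Editor's sketch (not part of the generated diff) ---------------------------------
# What "credentials file is provided" means at runtime: the transport loads the file
# through google.auth.load_credentials_from_file, the very call mocked further down.
# "credentials.json" is a placeholder path; a real service-account key file is required
# for this to succeed.
import google.auth

credentials, project_id = google.auth.load_credentials_from_file(
    "credentials.json",
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)
# ---------------------------------------------------------------------------------------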
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_search_tuning_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SearchTuningServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_search_tuning_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
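# --- Editor's sketch (not part of the generated diff) ---------------------------------
# The channel options asserted in the create_channel call below: -1 lifts gRPC's default
# message-size limits (notably the 4 MB receive cap), which matters for large responses.
# Creating a comparable channel directly with grpc (local, insecure target used purely
# for illustration):
import grpc

channel = grpc.insecure_channel(
    "localhost:50051",
    options=[
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ],
)
channel.close()
# ---------------------------------------------------------------------------------------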
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.TrainCustomModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_train_custom_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.train_custom_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + +def test_train_custom_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
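# --- Editor's sketch (not part of the generated diff) ---------------------------------
# AIP-4235 auto-population in one line: if a request declares an annotated UUID4 field
# (commonly "request_id") and the caller leaves it empty, the generated client fills it
# with a fresh uuid4 before sending. Rough stand-alone sketch; the field name is
# illustrative and TrainCustomModelRequest may not declare one.
import uuid

def autopopulate_request_id(request_fields: dict) -> dict:
    if not request_fields.get("request_id"):
        request_fields = {**request_fields, "request_id": str(uuid.uuid4())}
    return request_fields

print(autopopulate_request_id({"data_store": "projects/p/locations/l/dataStores/d"}))
# ---------------------------------------------------------------------------------------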
+ request = search_tuning_service.TrainCustomModelRequest( + data_store="data_store_value", + model_type="model_type_value", + model_id="model_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.train_custom_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest( + data_store="data_store_value", + model_type="model_type_value", + model_id="model_id_value", + ) + + +def test_train_custom_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_train_custom_model_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.train_custom_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + +@pytest.mark.asyncio +async def test_train_custom_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.train_custom_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.train_custom_model + ] = mock_rpc + + request = {} + await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_train_custom_model_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.TrainCustomModelRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.TrainCustomModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_train_custom_model_async_from_dict(): + await test_train_custom_model_async(request_type=dict) + + +def test_train_custom_model_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
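# --- Editor's sketch (not part of the generated diff) ---------------------------------
# The routing header asserted below is built by URL-encoding the request fields named in
# the method's routing annotation (here, data_store). The same helper family the
# generated clients use is public in google.api_core:
from google.api_core.gapic_v1 import routing_header

print(routing_header.to_routing_header([("data_store", "data_store_value")]))
# data_store=data_store_value
print(routing_header.to_grpc_metadata([("data_store", "data_store_value")]))
# ('x-goog-request-params', 'data_store=data_store_value')
# ---------------------------------------------------------------------------------------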
+ with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_train_custom_model_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = search_tuning_service.ListCustomModelsResponse() + response = client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +def test_list_custom_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_custom_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + +def test_list_custom_models_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +@pytest.mark.asyncio +async def test_list_custom_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_custom_models + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_custom_models + ] = mock_rpc + + request = {} + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.ListCustomModelsRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +@pytest.mark.asyncio +async def test_list_custom_models_async_from_dict(): + await test_list_custom_models_async(request_type=dict) + + +def test_list_custom_models_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = search_tuning_service.ListCustomModelsResponse() + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_custom_models_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.train_custom_model(request) + + # Establish that the response is the type that we expect. 
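# --- Editor's sketch (not part of the generated diff) ---------------------------------
# Outside the mocks, train_custom_model is a long-running operation: the returned future
# wraps the raw operations_pb2.Operation and callers usually block on .result(). A
# hedged usage sketch; it needs real credentials and an existing data store, and the
# resource name / model_type values below are placeholders.
from google.cloud.discoveryengine_v1.services.search_tuning_service import (
    SearchTuningServiceClient,
)
from google.cloud.discoveryengine_v1.types import search_tuning_service

client = SearchTuningServiceClient()  # real credentials, unlike the mocked tests
lro = client.train_custom_model(
    search_tuning_service.TrainCustomModelRequest(
        data_store=(
            "projects/PROJECT/locations/global/collections/default_collection"
            "/dataStores/DATA_STORE"
        ),
        model_type="MODEL_TYPE",  # placeholder
    )
)
print(lro.operation.name)  # raw LRO name, the value asserted in the test above
result = lro.result()      # blocks until training completes
# ---------------------------------------------------------------------------------------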
+ assert response.operation.name == "operations/spam" + + +def test_train_custom_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_train_custom_model_rest_required_fields( + request_type=search_tuning_service.TrainCustomModelRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
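# --- Editor's sketch (not part of the generated diff) ---------------------------------
# What the mocked transcode() stands in for: the REST transport matches the request
# against an http rule, expands path variables into the URI, and pushes the remaining
# fields into the body or query parameters. A hand-rolled illustration of that mapping;
# the URI pattern is an assumption for illustration only.
def transcode_train_custom_model(request: dict) -> dict:
    request = dict(request)
    data_store = request.pop("data_store")
    return {
        "method": "post",
        "uri": f"/v1/{data_store}:trainCustomModel",
        "body": request,      # leftover fields travel in the JSON body for this rule
        "query_params": {},   # nothing remains for the query string
    }

print(transcode_train_custom_model({
    "data_store": "projects/p/locations/l/collections/c/dataStores/d",
    "model_type": "MODEL_TYPE",
}))
# ---------------------------------------------------------------------------------------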
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.train_custom_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_train_custom_model_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.train_custom_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_train_custom_model_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.TrainCustomModelRequest.pb( + search_tuning_service.TrainCustomModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = search_tuning_service.TrainCustomModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.train_custom_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_train_custom_model_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
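# --- Editor's sketch (not part of the generated diff) ---------------------------------
# Why a mocked 400 surfaces as core_exceptions.BadRequest: the REST transport maps HTTP
# status codes onto google.api_core exception classes. The public mapping helper shows
# the same relationship directly:
from google.api_core import exceptions as core_exceptions

exc = core_exceptions.from_http_status(400, "train_custom_model failed")
assert isinstance(exc, core_exceptions.BadRequest)
# ---------------------------------------------------------------------------------------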
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.train_custom_model(request) + + +def test_train_custom_model_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = search_tuning_service.ListCustomModelsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_custom_models(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. 
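# --- Editor's sketch (not part of the generated diff) ---------------------------------
# The caching behaviour these assertions verify, reduced to plain Python: each RPC is
# wrapped once at construction time (retry/timeout defaults applied there) and the
# wrapped callable is reused for every request instead of being rebuilt per call.
class _ToyTransport:
    def __init__(self):
        self.wrap_calls = 0  # analogous to wrapper_fn.call_count in the test
        self._wrapped_methods = {
            "list_custom_models": self._wrap(self._list_custom_models_stub),
        }

    def _wrap(self, func):
        self.wrap_calls += 1  # stand-in for gapic_v1.method.wrap_method
        return func

    def _list_custom_models_stub(self, request):
        return {"models": []}

toy = _ToyTransport()
toy._wrapped_methods["list_custom_models"]({})
toy._wrapped_methods["list_custom_models"]({})
assert toy.wrap_calls == 1  # wrapped once, reused for both calls
# ---------------------------------------------------------------------------------------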
+ assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_custom_models_rest_required_fields( + request_type=search_tuning_service.ListCustomModelsRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_custom_models._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_custom_models._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = search_tuning_service.ListCustomModelsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_custom_models(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_custom_models_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_custom_models._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_custom_models_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.ListCustomModelsRequest.pb( + search_tuning_service.ListCustomModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + search_tuning_service.ListCustomModelsResponse.to_json( + search_tuning_service.ListCustomModelsResponse() + ) + ) + + request = search_tuning_service.ListCustomModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = search_tuning_service.ListCustomModelsResponse() + + client.list_custom_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_custom_models_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.ListCustomModelsRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_custom_models(request) + + +def test_list_custom_models_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SearchTuningServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
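# --- Editor's sketch (not part of the generated diff) ---------------------------------
# "ADC" is Application Default Credentials: when no credentials are passed, the
# transport calls google.auth.default(), which searches GOOGLE_APPLICATION_CREDENTIALS,
# gcloud user credentials, and the metadata server. This only succeeds where some
# default credential is actually configured.
import google.auth

credentials, project_id = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
# ---------------------------------------------------------------------------------------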
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SearchTuningServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SearchTuningServiceGrpcTransport, + ) + + +def test_search_tuning_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_search_tuning_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "train_custom_model", + "list_custom_models", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_search_tuning_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_search_tuning_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport() + adc.assert_called_once() + + +def test_search_tuning_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SearchTuningServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_search_tuning_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com/" + api_audience_tests = [None, "https://language2.com/"] + api_audience_expect = [host, "https://language2.com/"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SearchTuningServiceGrpcTransport, grpc_helpers), + (transports.SearchTuningServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_search_tuning_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_search_tuning_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SearchTuningServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_search_tuning_service_rest_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
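# --- Editor's sketch (not part of the generated diff) ---------------------------------
# The identity assertion below relies on the transport creating its operations client
# lazily, once, and returning the same object afterwards. The pattern in isolation:
class _LazyLROHolder:
    def __init__(self):
        self._operations_client = None

    @property
    def operations_client(self):
        if self._operations_client is None:
            self._operations_client = object()  # stand-in for the operations_v1 client
        return self._operations_client

holder = _LazyLROHolder()
assert holder.operations_client is holder.operations_client
# ---------------------------------------------------------------------------------------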
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_no_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_with_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_search_tuning_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SearchTuningServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SearchTuningServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.train_custom_model._session + session2 = client2.transport.train_custom_model._session + assert session1 != session2 + session1 = client1.transport.list_custom_models._session + session2 = client2.transport.list_custom_models._session + assert session1 != session2 + + +def test_search_tuning_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SearchTuningServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_search_tuning_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_search_tuning_service_grpc_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_search_tuning_service_grpc_lro_async_client(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_custom_tuning_model_path(): + project = "squid" + location = "clam" + data_store = "whelk" + custom_tuning_model = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + actual = SearchTuningServiceClient.custom_tuning_model_path( + project, location, data_store, custom_tuning_model + ) + assert expected == actual + + +def test_parse_custom_tuning_model_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "custom_tuning_model": "mussel", + } + path = SearchTuningServiceClient.custom_tuning_model_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_custom_tuning_model_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = SearchTuningServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "abalone", + "location": "squid", + "data_store": "clam", + } + path = SearchTuningServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SearchTuningServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SearchTuningServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SearchTuningServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SearchTuningServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SearchTuningServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SearchTuningServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SearchTuningServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SearchTuningServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SearchTuningServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SearchTuningServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SearchTuningServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SearchTuningServiceClient, transports.SearchTuningServiceGrpcTransport), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py index 08b5d51e76e2..c56cf06b35ec 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py @@ -67,6 +67,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -1809,6 +1810,309 @@ async def test_collect_user_event_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeUserEventsRequest, + dict, + ], +) +def test_purge_user_events(request_type, transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeUserEventsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_user_events_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.purge_user_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest() + + +def test_purge_user_events_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = purge_config.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.purge_user_events(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + +def test_purge_user_events_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_user_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.purge_user_events + ] = mock_rpc + request = {} + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_user_events_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_user_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest() + + +@pytest.mark.asyncio +async def test_purge_user_events_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.purge_user_events + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.purge_user_events + ] = mock_rpc + + request = {} + await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_user_events_async( + transport: str = "grpc_asyncio", request_type=purge_config.PurgeUserEventsRequest +): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeUserEventsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_user_events_async_from_dict(): + await test_purge_user_events_async(request_type=dict) + + +def test_purge_user_events_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_user_events_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -2151,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": { @@ -2792,6 +3097,265 @@ def test_collect_user_event_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeUserEventsRequest, + dict, + ], +) +def test_purge_user_events_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_user_events(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_purge_user_events_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_user_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.purge_user_events + ] = mock_rpc + + request = {} + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_purge_user_events_rest_required_fields( + request_type=purge_config.PurgeUserEventsRequest, +): + transport_class = transports.UserEventServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_user_events(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_user_events_rest_unset_required_fields(): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_user_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_user_events_rest_interceptors(null_interceptor): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserEventServiceRestInterceptor(), + ) + client = UserEventServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.UserEventServiceRestInterceptor, "post_purge_user_events" + ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, "pre_purge_user_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeUserEventsRequest.pb( + purge_config.PurgeUserEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeUserEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_user_events( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_user_events_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeUserEventsRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_user_events(request) + + +def test_purge_user_events_rest_error(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3182,6 +3746,7 @@ def test_user_event_service_base_transport(): methods = ( "write_user_event", "collect_user_event", + "purge_user_events", "import_user_events", "get_operation", "cancel_operation", @@ -3472,6 +4037,9 @@ def test_user_event_service_client_transport_session_collision(transport_name): session1 = client1.transport.collect_user_event._session session2 = client2.transport.collect_user_event._session assert session1 != session2 + session1 = client1.transport.purge_user_events._session + session2 = client2.transport.purge_user_events._session + assert session1 != session2 session1 = client1.transport.import_user_events._session session2 = client2.transport.import_user_events._session assert session1 != session2 diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py index 961c537481ba..b54085a025f3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py @@ -4181,6 +4181,10 @@ def test_create_data_store_rest(request_type): "external_idp_config": {"workforce_pool_name": "workforce_pool_name_value"}, }, "acl_enabled": True, + "workspace_config": { + "type_": 1, + "dasher_customer_id": "dasher_customer_id_value", + }, "document_processing_config": { "name": "name_value", "chunking_config": { @@ -5653,6 +5657,10 @@ def test_update_data_store_rest(request_type): "external_idp_config": {"workforce_pool_name": "workforce_pool_name_value"}, }, "acl_enabled": True, + "workspace_config": { + "type_": 1, + "dasher_customer_id": "dasher_customer_id_value", + }, "document_processing_config": { "name": "name_value", "chunking_config": { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py index 8adf368a1a3b..b7b3e0915308 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -4302,6 +4304,387 @@ 
async def test_get_processed_document_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4309,7 +4692,321 @@ async def test_get_processed_document_flattened_error_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4317,41 +5014,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4365,35 +5055,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4404,21 +5094,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4427,7 +5124,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4448,30 +5145,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4484,14 +5189,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4503,17 +5208,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4525,8 +5232,8 @@ def test_get_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -4535,7 +5242,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4548,10 +5255,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4560,16 +5267,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4577,25 +5284,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4604,26 +5311,85 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", ) -def test_get_document_rest_error(): +def test_list_documents_rest_pager(transport: str = "rest"): client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - document_service.ListDocumentsRequest, + document_service.CreateDocumentRequest, dict, ], ) -def test_list_documents_rest(request_type): +def test_create_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4633,32 +5399,146 @@ def test_list_documents_rest(request_type): request_init = { "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "acl_info": { + "readers": [ + { + "principals": [ + {"user_id": "user_id_value", "group_id": "group_id_value"} + ], + "idp_wide": True, + } + ] + }, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4672,35 +5552,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4708,31 +5589,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4741,7 +5623,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4753,47 +5635,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4806,14 +5696,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() 
) transcode.return_value = { "method": "post", @@ -4825,19 +5715,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ -4849,8 +5739,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4872,10 +5762,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4884,7 +5774,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4894,6 +5784,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4901,12 +5801,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -4919,7 +5819,7 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): +def test_create_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4928,85 +5828,36 @@ def test_list_documents_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), + client.create_document( + document_service.CreateDocumentRequest(), parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) -def test_list_documents_rest_pager(transport: str = "rest"): +def test_create_document_rest_error(): client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } - - pager = client.list_documents(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5014,12 +5865,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -5040,13 +5893,28 @@ def test_create_document_rest(request_type): ] }, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -5130,7 +5998,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. assert isinstance(response, gcd_document.Document) @@ -5140,7 +6008,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5154,36 +6022,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5191,32 +6057,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5237,7 +6098,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -5253,39 +6114,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) 
-def test_create_document_rest_interceptors(null_interceptor): +def test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5298,14 +6152,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5321,7 +6175,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5329,7 +6183,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -5341,8 +6195,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5351,7 +6205,9 @@ def test_create_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -5364,10 +6220,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5380,12 +6236,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5395,7 +6252,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5408,20 +6265,20 @@ def 
test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1alpha/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5430,9 +6287,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5442,11 +6298,11 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5455,11 +6311,11 @@ def test_create_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.UpdateDocumentRequest, + document_service.DeleteDocumentRequest, dict, ], ) -def test_update_document_rest(request_type): +def test_delete_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5467,135 +6323,29 @@ def test_update_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "acl_info": { - "readers": [ - { - "principals": [ - {"user_id": "user_id_value", "group_id": "group_id_value"} - ], - "idp_wide": True, - } - ] - }, - "index_time": {"seconds": 751, "nanos": 543}, + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5609,34 +6359,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5647,24 +6398,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5673,7 +6421,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5685,48 +6433,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5739,14 +6475,11 @@ def test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5758,19 +6491,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5779,11 +6508,10 @@ def test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", 
request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5792,9 +6520,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5807,10 +6533,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5819,53 +6545,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } - # get truthy value for each flattened field - mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5874,22 +6587,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5898,11 +6602,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5910,29 +6614,29 @@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5946,35 +6650,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5985,21 +6695,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6008,7 +6718,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6020,36 +6730,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6062,11 +6773,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -6078,15 +6794,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -6095,10 +6815,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -6107,7 +6828,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -6120,67 +6841,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6189,11 +6853,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6217,13 +6881,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6237,19 +6901,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -6258,20 +6920,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6282,21 +6945,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6330,24 +6996,32 @@ def test_import_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def 
test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6362,14 +7036,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -6385,7 +7059,7 @@ def test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -6393,7 +7067,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -6405,8 +7079,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6428,10 +7102,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6440,11 +7114,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.GetProcessedDocumentRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_get_processed_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6452,29 +7126,35 @@ def test_purge_documents_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document.ProcessedDocument( + document="document_value", + json_data="json_data_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.get_processed_document(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document.ProcessedDocument) + assert response.document == "document_value" -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_get_processed_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6488,40 +7168,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.get_processed_document + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_processed_document + ] = mock_rpc request = {} - client.purge_documents(request) + client.get_processed_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.get_processed_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_get_processed_document_rest_required_fields( + request_type=document_service.GetProcessedDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["filter"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6532,24 +7212,28 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).get_processed_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).get_processed_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "processed_document_format", + "processed_document_type", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6558,7 +7242,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document.ProcessedDocument() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6570,45 +7254,52 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.get_processed_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def test_get_processed_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.get_processed_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "processedDocumentFormat", + "processedDocumentType", + ) + ) & set( ( - "parent", - "filter", + "name", + "processedDocumentType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_get_processed_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6621,16 +7312,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_get_processed_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_get_processed_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.GetProcessedDocumentRequest.pb( + document_service.GetProcessedDocumentRequest() ) transcode.return_value = { "method": "post", @@ -6642,19 +7331,19 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = document.ProcessedDocument.to_json( + document.ProcessedDocument() ) - request = purge_config.PurgeDocumentsRequest() + request = document_service.GetProcessedDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document.ProcessedDocument() - client.purge_documents( + client.get_processed_document( request, metadata=[ ("key", "val"), @@ -6666,8 +7355,8 @@ def 
test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_get_processed_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetProcessedDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6676,7 +7365,7 @@ def test_purge_documents_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -6689,10 +7378,69 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.get_processed_document(request) -def test_purge_documents_rest_error(): +def test_get_processed_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.ProcessedDocument() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processed_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument" + % client.transport._host, + args[1], + ) + + +def test_get_processed_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_processed_document( + document_service.GetProcessedDocumentRequest(), + name="name_value", + ) + + +def test_get_processed_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6701,11 +7449,11 @@ def test_purge_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.GetProcessedDocumentRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_get_processed_document_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6713,35 +7461,33 @@ def test_get_processed_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.ProcessedDocument( - document="document_value", - json_data="json_data_value", - ) + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_processed_document(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert isinstance(response, document.ProcessedDocument) - assert response.document == "document_value" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_get_processed_document_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6756,7 +7502,7 @@ def test_get_processed_document_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_processed_document + client._transport.batch_get_documents_metadata in client._transport._wrapped_methods ) @@ -6766,29 +7512,29 @@ def test_get_processed_document_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_processed_document + client._transport.batch_get_documents_metadata ] = mock_rpc request = {} - client.get_processed_document(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_processed_document(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_processed_document_rest_required_fields( - request_type=document_service.GetProcessedDocumentRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6799,28 +7545,23 @@ def test_get_processed_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_processed_document._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_processed_document._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "processed_document_format", - "processed_document_type", - ) - ) + assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6829,7 +7570,7 @@ def test_get_processed_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.ProcessedDocument() + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6850,43 +7591,40 @@ def test_get_processed_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_processed_document(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_processed_document_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_processed_document._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "processedDocumentFormat", - "processedDocumentType", - ) - ) + set(("matcher",)) & set( ( - "name", - "processedDocumentType", + "parent", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_processed_document_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6899,14 +7637,14 @@ def test_get_processed_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_processed_document" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_processed_document" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetProcessedDocumentRequest.pb( - document_service.GetProcessedDocumentRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6918,19 +7656,21 @@ def test_get_processed_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.ProcessedDocument.to_json( - document.ProcessedDocument() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) ) - request = document_service.GetProcessedDocumentRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.ProcessedDocument() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.get_processed_document( + client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6942,8 +7682,9 @@ def 
test_get_processed_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_processed_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetProcessedDocumentRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6952,7 +7693,7 @@ def test_get_processed_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -6965,10 +7706,10 @@ def test_get_processed_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_processed_document(request) + client.batch_get_documents_metadata(request) -def test_get_processed_document_rest_flattened(): +def test_batch_get_documents_metadata_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6977,16 +7718,16 @@ def test_get_processed_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.ProcessedDocument() + return_value = document_service.BatchGetDocumentsMetadataResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -6994,25 +7735,27 @@ def test_get_processed_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_processed_document(**mock_args) + client.batch_get_documents_metadata(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument" + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" % client.transport._host, args[1], ) -def test_get_processed_document_rest_flattened_error(transport: str = "rest"): +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7021,13 +7764,13 @@ def test_get_processed_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_processed_document( - document_service.GetProcessedDocumentRequest(), - name="name_value", + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", ) -def test_get_processed_document_rest_error(): +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7180,6 +7923,7 @@ def test_document_service_base_transport(): "import_documents", "purge_documents", "get_processed_document", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -7487,6 +8231,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_processed_document._session session2 = client2.transport.get_processed_document._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py index b23c90ee838d..45144303f6e4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py @@ -2455,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index 6ffa744aae25..9345701436e5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore 
+from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -3911,6 +3913,387 @@ async def test_purge_documents_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3918,7 +4301,321 @@ async def test_purge_documents_field_headers_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3926,41 +4623,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3974,35 +4664,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4013,21 +4703,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4036,7 +4733,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4057,30 +4754,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4093,14 +4798,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4112,17 +4817,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4134,8 +4841,8 @@ def test_get_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -4144,7 +4851,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4157,10 +4864,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4169,16 +4876,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4186,25 +4893,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4213,61 +4920,224 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_get_document_rest_error(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - document_service.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4281,35 +5151,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4317,31 +5188,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4350,7 +5222,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4362,47 +5234,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4415,14 +5295,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4434,19 +5314,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ 
-4458,8 +5338,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4481,10 +5361,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4493,7 +5373,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4503,6 +5383,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4510,12 +5400,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4528,94 +5418,45 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), - parent="parent_value", - ) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_documents(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4623,12 +5464,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -4639,13 +5482,28 @@ def test_create_document_rest(request_type): "parent_document_id": "parent_document_id_value", "derived_struct_data": {}, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + 
{ + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -4729,7 +5587,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. assert isinstance(response, gcd_document.Document) @@ -4739,7 +5597,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4753,36 +5611,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4790,32 +5646,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4836,7 +5687,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4852,39 +5703,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) 
-def test_create_document_rest_interceptors(null_interceptor): +def test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4897,14 +5741,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4920,7 +5764,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4928,7 +5772,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -4940,8 +5784,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4950,7 +5794,9 @@ def test_create_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -4963,10 +5809,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4979,12 +5825,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -4994,7 +5841,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5007,20 +5854,20 @@ def 
test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1beta/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5029,9 +5876,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5041,150 +5887,54 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - document_service.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "index_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5198,34 +5948,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5236,24 +5987,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,7 +6010,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5274,48 +6022,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5328,14 +6064,11 @@ def test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5347,19 +6080,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5368,11 +6097,10 @@ def test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", 
request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5381,9 +6109,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5396,10 +6122,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5408,53 +6134,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } # get truthy value for each flattened field mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5463,22 +6176,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5487,11 +6191,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5499,29 +6203,29 @@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5535,35 +6239,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5574,21 +6284,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5597,7 +6307,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5609,36 +6319,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5651,11 +6362,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5667,15 +6383,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -5684,10 +6404,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -5696,7 +6417,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -5709,67 +6430,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5778,11 +6442,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5806,13 +6470,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5826,19 +6490,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -5847,20 +6509,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5871,21 +6534,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5919,24 +6585,32 @@ def test_import_documents_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def 
test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5951,14 +6625,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5974,7 +6648,7 @@ def test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5982,7 +6656,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -5994,8 +6668,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6017,10 +6691,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6029,11 +6703,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6048,22 +6722,26 @@ def test_purge_documents_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6077,40 +6755,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc request = {} - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6121,24 +6799,23 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6147,7 +6824,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6159,45 +6836,49 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("matcher",)) & set( ( "parent", - "filter", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6210,16 +6891,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6231,19 +6910,21 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) ) - request = purge_config.PurgeDocumentsRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.purge_documents( + 
client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6255,8 +6936,9 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6278,10 +6960,71 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.batch_get_documents_metadata(request) -def test_purge_documents_rest_error(): +def test_batch_get_documents_metadata_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.BatchGetDocumentsMetadataResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_get_documents_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" + % client.transport._host, + args[1], + ) + + +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6433,6 +7176,7 @@ def test_document_service_base_transport(): "delete_document", "import_documents", "purge_documents", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -6737,6 +7481,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.purge_documents._session session2 = client2.transport.purge_documents._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py index 0fd27c82c7e8..e22d9e9f080e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py @@ -3895,6 +3895,7 @@ def test_create_evaluation_rest(request_type): "include_citations": True, "ignore_adversarial_query": True, "ignore_non_summary_seeking_query": True, + "ignore_low_relevant_content": True, "model_prompt_spec": {"preamble": "preamble_value"}, "language_code": "language_code_value", "model_spec": {"version": "version_value"}, @@ -3934,6 +3935,7 @@ def test_create_evaluation_rest(request_type): "query_id": "query_id_value", "search_result_persistence_count": 3328, }, + "relevance_threshold": 1, }, "query_set_spec": {"sample_query_set": "sample_query_set_value"}, }, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py index 33f10ee85214..1eddb9e8a3c5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py @@ -2761,6 +2761,7 @@ def test_update_serving_config_rest(request_type): "include_citations": True, "ignore_adversarial_query": True, "ignore_non_summary_seeking_query": True, + "ignore_low_relevant_content": True, "model_prompt_spec": {"preamble": "preamble_value"}, "language_code": "language_code_value", "model_spec": {"version": "version_value"}, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index fe2c1010670e..b04e0007a9c2 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -2455,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", 
"promotion_ids_value2"], + "joined": True, } ], "panel": { From 4136c10fabc1df012b028a5d407aaec326e448b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 08:08:58 -0400 Subject: [PATCH 047/108] docs: [google-cloud-bigquery-datatransfer] add a note to the CreateTransferConfigRequest and UpdateTransferConfigRequest to disable restricting service account usage (#13051) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 670851613 Source-Link: https://github.com/googleapis/googleapis/commit/c13cea289cfb977c7a8776b43dfd63a48ddb2b71 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1f2c334ef5bac2d4a2027759e03b1eeba426f0f2 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGF0cmFuc2Zlci8uT3dsQm90LnlhbWwiLCJoIjoiMWYyYzMzNGVmNWJhYzJkNGEyMDI3NzU5ZTAzYjFlZWJhNDI2ZjBmMiJ9 --------- Co-authored-by: Owl Bot --- .../bigquery_datatransfer/gapic_version.py | 2 +- .../bigquery_datatransfer_v1/gapic_version.py | 2 +- .../data_transfer_service/async_client.py | 38 +++++++++++-------- .../services/data_transfer_service/client.py | 38 +++++++++++-------- .../data_transfer_service/transports/rest.py | 38 +++++++++++-------- .../types/datatransfer.py | 18 +++++++-- ...google.cloud.bigquery.datatransfer.v1.json | 2 +- 7 files changed, 87 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 769a9d92f8cf..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 769a9d92f8cf..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index 87ab9e5d50f5..ea32882077da 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -554,17 +554,20 @@ async def sample_create_transfer_config(): Args: request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]]): - The request object. 
A request to create a data transfer - configuration. If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. parent (:class:`str`): Required. The BigQuery project id where the transfer configuration should be created. Must be in the format @@ -693,10 +696,15 @@ async def sample_update_transfer_config(): Args: request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]]): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`): Required. Data transfer configuration to create. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index 7bf64c346481..b5032cd300f4 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -1002,17 +1002,20 @@ def sample_create_transfer_config(): Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]): - The request object. A request to create a data transfer - configuration. If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. 
Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. parent (str): Required. The BigQuery project id where the transfer configuration should be created. Must be in the format @@ -1138,10 +1141,15 @@ def sample_update_transfer_config(): Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py index cd9e0af0c4d2..91347f2be728 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py @@ -789,17 +789,20 @@ def __call__( Args: request (~.datatransfer.CreateTransferConfigRequest): - The request object. A request to create a data transfer - configuration. If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2104,10 +2107,15 @@ def __call__( Args: request (~.datatransfer.UpdateTransferConfigRequest): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. 
To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 30e607bd0c09..e0319b78c3e9 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -480,6 +480,12 @@ class CreateTransferConfigRequest(proto.Message): user id corresponding to the authorization info. Otherwise, the transfer configuration will be associated with the calling user. + When using a cross project service account for creating a transfer + config, you must enable cross project service account usage. For + more information, see `Disable attachment of service accounts to + resources in other + projects `__. + Attributes: parent (str): Required. The BigQuery project id where the transfer @@ -577,9 +583,15 @@ class CreateTransferConfigRequest(proto.Message): class UpdateTransferConfigRequest(proto.Message): - r"""A request to update a transfer configuration. To update the - user id of the transfer configuration, authorization info needs - to be provided. + r"""A request to update a transfer configuration. To update the user id + of the transfer configuration, authorization info needs to be + provided. + + When using a cross project service account for updating a transfer + config, you must enable cross project service account usage. For + more information, see `Disable attachment of service accounts to + resources in other + projects `__. 
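# Aside (illustrative sketch, not part of this patch): the notes above about cross
# project service accounts apply when CreateTransferConfigRequest carries an explicit
# service_account_name from another project. A minimal create call with the generated
# client might look like this; project, dataset, schedule, and service-account values
# are placeholders, and the query params are only an example.
from google.cloud import bigquery_datatransfer_v1


def create_transfer_config_sketch() -> bigquery_datatransfer_v1.TransferConfig:
    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    transfer_config = bigquery_datatransfer_v1.TransferConfig(
        destination_dataset_id="my_dataset",  # placeholder
        display_name="nightly scheduled query",  # placeholder
        data_source_id="scheduled_query",
        params={"query": "SELECT CURRENT_TIMESTAMP() AS ts"},  # example only
        schedule="every 24 hours",  # placeholder
    )
    request = bigquery_datatransfer_v1.CreateTransferConfigRequest(
        parent="projects/my-project/locations/us",  # placeholder
        transfer_config=transfer_config,
        # If this account lives in a different project, that project must allow
        # cross project service account usage, per the documentation added here.
        service_account_name="transfers@other-project.iam.gserviceaccount.com",  # placeholder
    )
    return client.create_transfer_config(request=request)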
Attributes: transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index 1fbf5757f11d..adc8c281da8f 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.15.6" + "version": "0.1.0" }, "snippets": [ { From c28e7cacd325ceea0af1358e1785bb592bd2ec39 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 4 Sep 2024 14:09:36 +0200 Subject: [PATCH 048/108] chore(deps): update dependency cryptography to v43.0.1 [security] (#13050) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [cryptography](https://redirect.github.com/pyca/cryptography) ([changelog](https://cryptography.io/en/latest/changelog/)) | `==43.0.0` -> `==43.0.1` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/cryptography/43.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/cryptography/43.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/cryptography/43.0.0/43.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/cryptography/43.0.0/43.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | ### GitHub Vulnerability Alerts #### [GHSA-h4gh-qq45-vh27](https://redirect.github.com/pyca/cryptography/security/advisories/GHSA-h4gh-qq45-vh27) pyca/cryptography's wheels include a statically linked copy of OpenSSL. The versions of OpenSSL included in cryptography 37.0.0-43.0.0 are vulnerable to a security issue. More details about the vulnerability itself can be found in https://openssl-library.org/news/secadv/20240903.txt. If you are building cryptography source ("sdist") then you are responsible for upgrading your copy of OpenSSL. Only users installing from wheels built by the cryptography project (i.e., those distributed on PyPI) need to update their cryptography versions. --- ### Release Notes
pyca/cryptography (cryptography)

### [`v43.0.1`](https://redirect.github.com/pyca/cryptography/compare/43.0.0...43.0.1)

[Compare Source](https://redirect.github.com/pyca/cryptography/compare/43.0.0...43.0.1)
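As a quick sanity check after this bump (a sketch, not part of the Renovate change itself), you can confirm which `cryptography` wheel an environment resolved and which OpenSSL it statically links; `openssl_version_text()` on the OpenSSL backend object is assumed to be available here and may move between releases:

```python
# Sketch: report the installed cryptography version and its bundled OpenSSL.
import cryptography
from cryptography.hazmat.backends.openssl.backend import backend

print(cryptography.__version__)        # expect 43.0.1 or newer after this update
print(backend.openssl_version_text())  # OpenSSL statically linked into the wheel
```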
--- ### Configuration 📅 **Schedule**: Branch creation - "" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/googleapis/google-cloud-python). --- .kokoro/requirements-aoss.txt | 56 +++++++++++++++++------------------ 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/.kokoro/requirements-aoss.txt b/.kokoro/requirements-aoss.txt index eede56b4ce68..a4566f2179c5 100644 --- a/.kokoro/requirements-aoss.txt +++ b/.kokoro/requirements-aoss.txt @@ -177,34 +177,34 @@ charset-normalizer==3.3.2 \ --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 # via requests -cryptography==43.0.0 \ - --hash=sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709 \ - --hash=sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069 \ - --hash=sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2 \ - --hash=sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b \ - --hash=sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e \ - --hash=sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70 \ - --hash=sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778 \ - --hash=sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22 \ - --hash=sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895 \ - --hash=sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf \ - --hash=sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431 \ - --hash=sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f \ - --hash=sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947 \ - --hash=sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74 \ - --hash=sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc \ - --hash=sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66 \ - --hash=sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66 \ - --hash=sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf \ - --hash=sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f \ - --hash=sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5 \ - --hash=sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e \ - --hash=sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f \ - --hash=sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55 \ - --hash=sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1 \ - --hash=sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47 \ - --hash=sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5 \ - --hash=sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0 +cryptography==43.0.1 \ + --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ + 
--hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ + --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ + --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ + --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ + --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ + --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ + --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ + --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ + --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ + --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ + --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ + --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ + --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ + --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ + --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ + --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ + --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ + --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ + --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ + --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ + --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ + --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ + --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ + --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ + --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ + --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 # via secretstorage google-auth==2.34.0 \ --hash=sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 \ From f5a3ee3653199892a2f2ca967b00a2fbcc9ca5e4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 12:25:31 +0000 Subject: [PATCH 049/108] chore: release main (#13052) :robot: I have created a release *beep* *boop* ---
google-cloud-bigquery-datatransfer: 3.15.7 ## [3.15.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.6...google-cloud-bigquery-datatransfer-v3.15.7) (2024-09-04) ### Documentation * [google-cloud-bigquery-datatransfer] add a note to the CreateTransferConfigRequest and UpdateTransferConfigRequest to disable restricting service account usage ([#13051](https://github.com/googleapis/google-cloud-python/issues/13051)) ([4136c10](https://github.com/googleapis/google-cloud-python/commit/4136c10fabc1df012b028a5d407aaec326e448b6))
google-cloud-discoveryengine: 0.12.2 ## [0.12.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.12.1...google-cloud-discoveryengine-v0.12.2) (2024-09-04) ### Features * **v1alpha:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1alpha:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1alpha:** return query segment in NL query understanding ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1alpha:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1alpha:** support creating workspace search data stores ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** allow set relevance threshold on search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** return structured document info in answers ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** support batch documents purge with GCS input ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** support natural language understanding in search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** support one box search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** promot user event purge to v1 ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** promote search tuning service to v1 ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** return structured document info in answers ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** support batch documents purge with GCS input ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) ### Documentation * **v1alpha:** keep the API doc up-to-date 
with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1beta:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) * **v1:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9))
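For the batch get documents metadata surface listed above, a minimal v1beta usage sketch might look like the following. The `parent`/`matcher` fields and the method name come from the generated code in this series; the nested `Matcher`/`UrisMatcher` shapes, the `documents_metadata` response field, and all resource names are assumptions or placeholders:

```python
from google.cloud import discoveryengine_v1beta as discoveryengine

client = discoveryengine.DocumentServiceClient()

request = discoveryengine.BatchGetDocumentsMetadataRequest(
    # Placeholder branch resource name.
    parent=(
        "projects/my-project/locations/global/dataStores/my-data-store"
        "/branches/default_branch"
    ),
    # Assumed matcher shape: look up indexed documents by their source URIs.
    matcher=discoveryengine.BatchGetDocumentsMetadataRequest.Matcher(
        uris_matcher=discoveryengine.BatchGetDocumentsMetadataRequest.UrisMatcher(
            uris=["https://example.com/faq"]
        )
    ),
)

response = client.batch_get_documents_metadata(request=request)
for document_metadata in response.documents_metadata:
    print(document_metadata)
```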
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .kokoro/requirements-aoss.txt | 224 ++++++++---------- .release-please-manifest.json | 4 +- .../CHANGELOG.md | 7 + .../bigquery_datatransfer/gapic_version.py | 2 +- .../bigquery_datatransfer_v1/gapic_version.py | 2 +- ...google.cloud.bigquery.datatransfer.v1.json | 2 +- .../google-cloud-discoveryengine/CHANGELOG.md | 33 +++ .../cloud/discoveryengine/gapic_version.py | 2 +- .../cloud/discoveryengine_v1/gapic_version.py | 2 +- .../discoveryengine_v1alpha/gapic_version.py | 2 +- .../discoveryengine_v1beta/gapic_version.py | 2 +- ...adata_google.cloud.discoveryengine.v1.json | 2 +- ..._google.cloud.discoveryengine.v1alpha.json | 2 +- ...a_google.cloud.discoveryengine.v1beta.json | 2 +- renovate.json | 3 +- 15 files changed, 157 insertions(+), 134 deletions(-) diff --git a/.kokoro/requirements-aoss.txt b/.kokoro/requirements-aoss.txt index a4566f2179c5..34b8f631e421 100644 --- a/.kokoro/requirements-aoss.txt +++ b/.kokoro/requirements-aoss.txt @@ -8,82 +8,67 @@ backports-tarfile==1.2.0 \ --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 # via jaraco-context -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a +cachetools==5.4.0 \ + --hash=sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474 \ + --hash=sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827 # via google-auth certifi==2024.7.4 \ --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests -cffi==1.17.0 \ - --hash=sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f \ - --hash=sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab \ - --hash=sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499 \ - --hash=sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058 \ - --hash=sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693 \ - --hash=sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb \ - --hash=sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377 \ - --hash=sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885 \ - --hash=sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2 \ - --hash=sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401 \ - --hash=sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4 \ - --hash=sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b \ - --hash=sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59 \ - --hash=sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f \ - --hash=sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c \ - --hash=sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555 \ - --hash=sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa \ - 
--hash=sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424 \ - --hash=sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb \ - --hash=sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2 \ - --hash=sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8 \ - --hash=sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e \ - --hash=sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9 \ - --hash=sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82 \ - --hash=sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828 \ - --hash=sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759 \ - --hash=sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc \ - --hash=sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118 \ - --hash=sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf \ - --hash=sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932 \ - --hash=sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a \ - --hash=sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29 \ - --hash=sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206 \ - --hash=sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2 \ - --hash=sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c \ - --hash=sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c \ - --hash=sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0 \ - --hash=sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a \ - --hash=sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195 \ - --hash=sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6 \ - --hash=sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9 \ - --hash=sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc \ - --hash=sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb \ - --hash=sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0 \ - --hash=sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7 \ - --hash=sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb \ - --hash=sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a \ - --hash=sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492 \ - --hash=sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720 \ - --hash=sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42 \ - --hash=sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7 \ - --hash=sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d \ - --hash=sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d \ - --hash=sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb \ - --hash=sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4 \ - --hash=sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2 \ - --hash=sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b \ - --hash=sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8 \ - --hash=sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e \ - 
--hash=sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204 \ - --hash=sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3 \ - --hash=sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150 \ - --hash=sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4 \ - --hash=sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76 \ - --hash=sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e \ - --hash=sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb \ - --hash=sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + 
--hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ @@ -177,52 +162,52 @@ charset-normalizer==3.3.2 \ --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 # via requests -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - 
--hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 +cryptography==43.0.0 \ + --hash=sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709 \ + --hash=sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069 \ + --hash=sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2 \ + --hash=sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b \ + --hash=sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e \ + --hash=sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70 \ + --hash=sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778 \ + --hash=sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22 \ + --hash=sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895 \ + --hash=sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf \ + --hash=sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431 \ + --hash=sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f \ + --hash=sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947 \ + --hash=sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74 \ + --hash=sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc \ + --hash=sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66 \ + --hash=sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66 \ + --hash=sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf \ + --hash=sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f \ + --hash=sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5 \ + --hash=sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e \ + --hash=sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f \ + --hash=sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55 \ + --hash=sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1 \ + --hash=sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47 \ + --hash=sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5 \ + --hash=sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0 # via secretstorage -google-auth==2.34.0 \ - --hash=sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 \ - --hash=sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc +google-auth==2.32.0 \ + --hash=sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022 \ + --hash=sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b # via keyrings-google-artifactregistry-auth -idna==3.8 \ - --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ - --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + 
--hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==8.4.0 \ - --hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \ - --hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5 +importlib-metadata==8.2.0 \ + --hash=sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369 \ + --hash=sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d # via keyring jaraco-classes==3.4.0 \ --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 # via keyring -jaraco-context==6.0.1 \ +jaraco-context==5.3.0 \ --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 # via keyring @@ -236,9 +221,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -keyring==25.3.0 \ - --hash=sha256:8d85a1ea5d6db8515b59e1c5d1d1678b03cf7fc8b8dcfb1651e8c4a524eb42ef \ - --hash=sha256:8d963da00ccdf06e356acd9bf3b743208878751032d8599c6cc89eb51310ffae +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # -r requirements-aoss.in # keyrings-google-artifactregistry-auth @@ -246,9 +231,9 @@ keyrings-google-artifactregistry-auth==1.1.2 \ --hash=sha256:bd6abb72740d2dfeb4a5c03c3b105c6f7dba169caa29dee3959694f1f02c77de \ --hash=sha256:e3f18b50fa945c786593014dc225810d191671d4f5f8e12d9259e39bad3605a3 # via -r requirements-aoss.in -more-itertools==10.4.0 \ - --hash=sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27 \ - --hash=sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 # via # jaraco-classes # jaraco-functools @@ -286,10 +271,7 @@ urllib3==2.2.2 \ --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via requests -zipp==3.20.0 \ - --hash=sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31 \ - --hash=sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata -jaraco.context==6.0.1 \ - --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ - --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index be5aedaf8867..a695d96468bb 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -38,7 +38,7 @@ "packages/google-cloud-bigquery-connection": "1.15.5", "packages/google-cloud-bigquery-data-exchange": "0.5.13", "packages/google-cloud-bigquery-datapolicies": "0.6.8", - "packages/google-cloud-bigquery-datatransfer": "3.15.6", + "packages/google-cloud-bigquery-datatransfer": "3.15.7", "packages/google-cloud-bigquery-logging": "1.4.5", "packages/google-cloud-bigquery-migration": "0.11.9", "packages/google-cloud-bigquery-reservation": "1.13.5", @@ -74,7 
+74,7 @@ "packages/google-cloud-developerconnect": "0.1.2", "packages/google-cloud-dialogflow": "2.31.0", "packages/google-cloud-dialogflow-cx": "1.35.0", - "packages/google-cloud-discoveryengine": "0.12.1", + "packages/google-cloud-discoveryengine": "0.12.2", "packages/google-cloud-dlp": "3.22.0", "packages/google-cloud-dms": "1.9.5", "packages/google-cloud-documentai": "2.31.0", diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index 649945d0a61f..953e701b7f5e 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.15.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.6...google-cloud-bigquery-datatransfer-v3.15.7) (2024-09-04) + + +### Documentation + +* [google-cloud-bigquery-datatransfer] add a note to the CreateTransferConfigRequest and UpdateTransferConfigRequest to disable restricting service account usage ([#13051](https://github.com/googleapis/google-cloud-python/issues/13051)) ([4136c10](https://github.com/googleapis/google-cloud-python/commit/4136c10fabc1df012b028a5d407aaec326e448b6)) + ## [3.15.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.5...google-cloud-bigquery-datatransfer-v3.15.6) (2024-08-20) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 558c8aab67c5..fc64b41dd679 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.15.7" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 558c8aab67c5..fc64b41dd679 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.15.7" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index adc8c281da8f..da58d7e46817 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "0.1.0" + "version": "3.15.7" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/CHANGELOG.md b/packages/google-cloud-discoveryengine/CHANGELOG.md index 06c234fed935..51aeebeac072 100644 --- a/packages/google-cloud-discoveryengine/CHANGELOG.md +++ b/packages/google-cloud-discoveryengine/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## [0.12.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.12.1...google-cloud-discoveryengine-v0.12.2) (2024-09-04) + + +### Features + +* **v1alpha:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** return query segment in NL query understanding ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** support creating workspace search data stores ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** allow set relevance threshold on search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** return structured document info in answers ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support batch documents purge with GCS input ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support natural language understanding in search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support one box search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** promot user event purge to v1 
([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** promote search tuning service to v1 ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** return structured document info in answers ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** support batch documents purge with GCS input ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) + + +### Documentation + +* **v1alpha:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) + ## [0.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.12.0...google-cloud-discoveryengine-v0.12.1) (2024-08-08) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py index 558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py index 558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py index 558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py index 558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 7c8ee0acc6ee..747564c4a6ad 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.1.0" + "version": "0.12.2" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index 4d014b08daf6..c1a9f90dc413 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.1.0" + "version": "0.12.2" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index c8efdd7d5106..6946ceb3a447 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.1.0" + "version": "0.12.2" }, "snippets": [ { diff --git a/renovate.json b/renovate.json 
index a35fc4d36c4e..789608ba73c9 100644 --- a/renovate.json +++ b/renovate.json @@ -6,6 +6,7 @@ "schedule:weekly" ], "ignorePaths": [ - ".kokoro/requirements.txt" + ".kokoro/requirements.txt", + ".kokoro/requirements-aoss.txt" ] } From 3762ff40e51466bc516939a31732300c8e20211a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:17:09 -0400 Subject: [PATCH 050/108] fix!: [google-cloud-apihub] remove gRPC support for client libraries (#13055) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 670927686 Source-Link: https://github.com/googleapis/googleapis/commit/8c6de209d316e7a33fcd28e743ae893c83b17eed Source-Link: https://github.com/googleapis/googleapis-gen/commit/c3840f8c7d753503d90394b7b1d12f944ebdd501 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFwaWh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzM4NDBmOGM3ZDc1MzUwM2Q5MDM5NGI3YjFkMTJmOTQ0ZWJkZDUwMSJ9 --------- Co-authored-by: Owl Bot --- .../google/cloud/apihub/__init__.py | 26 - .../google/cloud/apihub_v1/__init__.py | 22 +- .../cloud/apihub_v1/gapic_metadata.json | 810 +- .../apihub_v1/services/api_hub/__init__.py | 6 +- .../apihub_v1/services/api_hub/client.py | 4 - .../apihub_v1/services/api_hub/pagers.py | 624 - .../services/api_hub/transports/__init__.py | 6 - .../services/api_hub_dependencies/__init__.py | 6 +- .../services/api_hub_dependencies/client.py | 4 - .../services/api_hub_dependencies/pagers.py | 78 - .../transports/__init__.py | 6 - .../services/api_hub_plugin/__init__.py | 6 +- .../services/api_hub_plugin/client.py | 4 - .../api_hub_plugin/transports/__init__.py | 6 - .../__init__.py | 6 +- .../client.py | 6 - .../pagers.py | 93 - .../transports/__init__.py | 6 - .../services/linting_service/__init__.py | 6 +- .../services/linting_service/client.py | 4 - .../linting_service/transports/__init__.py | 6 - .../services/provisioning/__init__.py | 6 +- .../apihub_v1/services/provisioning/client.py | 4 - .../provisioning/transports/__init__.py | 6 - .../__init__.py | 6 +- .../client.py | 6 - .../pagers.py | 93 - .../transports/__init__.py | 8 - ...b_v1_generated_api_hub_create_api_async.py | 56 - ...enerated_api_hub_create_attribute_async.py | 58 - ...nerated_api_hub_create_deployment_async.py | 60 - ...rated_api_hub_create_external_api_async.py | 56 - ..._v1_generated_api_hub_create_spec_async.py | 58 - ..._generated_api_hub_create_version_async.py | 56 - ...b_v1_generated_api_hub_delete_api_async.py | 50 - ...enerated_api_hub_delete_attribute_async.py | 50 - ...nerated_api_hub_delete_deployment_async.py | 50 - ...rated_api_hub_delete_external_api_async.py | 50 - ..._v1_generated_api_hub_delete_spec_async.py | 50 - ..._generated_api_hub_delete_version_async.py | 50 - ...ub_dependencies_create_dependency_async.py | 57 - ...ub_dependencies_delete_dependency_async.py | 50 - ...i_hub_dependencies_get_dependency_async.py | 52 - ...ub_dependencies_list_dependencies_async.py | 53 - ...ub_dependencies_update_dependency_async.py | 56 - ...ihub_v1_generated_api_hub_get_api_async.py | 52 - ...nerated_api_hub_get_api_operation_async.py | 52 - ...1_generated_api_hub_get_attribute_async.py | 52 - ..._generated_api_hub_get_definition_async.py | 52 - ..._generated_api_hub_get_deployment_async.py | 52 - ...enerated_api_hub_get_external_api_async.py | 52 - ...hub_v1_generated_api_hub_get_spec_async.py | 52 - ...nerated_api_hub_get_spec_contents_async.py | 52 - ..._v1_generated_api_hub_get_version_async.py | 52 - ...rated_api_hub_list_api_operations_async.py | 53 - 
...ub_v1_generated_api_hub_list_apis_async.py | 53 - ...generated_api_hub_list_attributes_async.py | 53 - ...enerated_api_hub_list_deployments_async.py | 53 - ...erated_api_hub_list_external_apis_async.py | 53 - ...b_v1_generated_api_hub_list_specs_async.py | 53 - ...1_generated_api_hub_list_versions_async.py | 53 - ...ted_api_hub_plugin_disable_plugin_async.py | 52 - ...ated_api_hub_plugin_enable_plugin_async.py | 52 - ...nerated_api_hub_plugin_get_plugin_async.py | 52 - ...enerated_api_hub_search_resources_async.py | 54 - ...b_v1_generated_api_hub_update_api_async.py | 55 - ...enerated_api_hub_update_attribute_async.py | 57 - ...nerated_api_hub_update_deployment_async.py | 59 - ...rated_api_hub_update_external_api_async.py | 55 - ..._v1_generated_api_hub_update_spec_async.py | 57 - ..._generated_api_hub_update_version_async.py | 55 - ..._create_host_project_registration_async.py | 57 - ...ice_get_host_project_registration_async.py | 52 - ...e_list_host_project_registrations_async.py | 53 - ...d_linting_service_get_style_guide_async.py | 52 - ..._service_get_style_guide_contents_async.py | 52 - ...nerated_linting_service_lint_spec_async.py | 50 - ...inting_service_update_style_guide_async.py | 57 - ...visioning_create_api_hub_instance_async.py | 60 - ...provisioning_get_api_hub_instance_async.py | 52 - ...visioning_lookup_api_hub_instance_async.py | 52 - ...create_runtime_project_attachment_async.py | 57 - ...delete_runtime_project_attachment_async.py | 50 - ...ce_get_runtime_project_attachment_async.py | 52 - ..._list_runtime_project_attachments_async.py | 53 - ...lookup_runtime_project_attachment_async.py | 52 - ...ippet_metadata_google.cloud.apihub.v1.json | 7007 +--- .../unit/gapic/apihub_v1/test_api_hub.py | 34405 ++++------------ .../apihub_v1/test_api_hub_dependencies.py | 7052 +--- .../gapic/apihub_v1/test_api_hub_plugin.py | 4550 +- .../test_host_project_registration_service.py | 5560 +-- .../gapic/apihub_v1/test_linting_service.py | 5347 +-- .../unit/gapic/apihub_v1/test_provisioning.py | 5044 +-- ...test_runtime_project_attachment_service.py | 7199 +--- 94 files changed, 18716 insertions(+), 62412 deletions(-) delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py 
delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py delete mode 100644 
packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py delete mode 100644 packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py diff --git a/packages/google-cloud-apihub/google/cloud/apihub/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py index 62b04504759e..72b5c1f8fbe5 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py @@ -18,35 +18,16 @@ __version__ = package_version.__version__ -from google.cloud.apihub_v1.services.api_hub.async_client import ApiHubAsyncClient from google.cloud.apihub_v1.services.api_hub.client import ApiHubClient -from google.cloud.apihub_v1.services.api_hub_dependencies.async_client import ( - 
ApiHubDependenciesAsyncClient, -) from google.cloud.apihub_v1.services.api_hub_dependencies.client import ( ApiHubDependenciesClient, ) -from google.cloud.apihub_v1.services.api_hub_plugin.async_client import ( - ApiHubPluginAsyncClient, -) from google.cloud.apihub_v1.services.api_hub_plugin.client import ApiHubPluginClient -from google.cloud.apihub_v1.services.host_project_registration_service.async_client import ( - HostProjectRegistrationServiceAsyncClient, -) from google.cloud.apihub_v1.services.host_project_registration_service.client import ( HostProjectRegistrationServiceClient, ) -from google.cloud.apihub_v1.services.linting_service.async_client import ( - LintingServiceAsyncClient, -) from google.cloud.apihub_v1.services.linting_service.client import LintingServiceClient -from google.cloud.apihub_v1.services.provisioning.async_client import ( - ProvisioningAsyncClient, -) from google.cloud.apihub_v1.services.provisioning.client import ProvisioningClient -from google.cloud.apihub_v1.services.runtime_project_attachment_service.async_client import ( - RuntimeProjectAttachmentServiceAsyncClient, -) from google.cloud.apihub_v1.services.runtime_project_attachment_service.client import ( RuntimeProjectAttachmentServiceClient, ) @@ -175,19 +156,12 @@ __all__ = ( "ApiHubClient", - "ApiHubAsyncClient", "ApiHubDependenciesClient", - "ApiHubDependenciesAsyncClient", "ApiHubPluginClient", - "ApiHubPluginAsyncClient", "HostProjectRegistrationServiceClient", - "HostProjectRegistrationServiceAsyncClient", "LintingServiceClient", - "LintingServiceAsyncClient", "ProvisioningClient", - "ProvisioningAsyncClient", "RuntimeProjectAttachmentServiceClient", - "RuntimeProjectAttachmentServiceAsyncClient", "ApiHubResource", "CreateApiRequest", "CreateAttributeRequest", diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py index 27a89617ee68..ddde89662be7 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py @@ -18,20 +18,15 @@ __version__ = package_version.__version__ -from .services.api_hub import ApiHubAsyncClient, ApiHubClient -from .services.api_hub_dependencies import ( - ApiHubDependenciesAsyncClient, - ApiHubDependenciesClient, -) -from .services.api_hub_plugin import ApiHubPluginAsyncClient, ApiHubPluginClient +from .services.api_hub import ApiHubClient +from .services.api_hub_dependencies import ApiHubDependenciesClient +from .services.api_hub_plugin import ApiHubPluginClient from .services.host_project_registration_service import ( - HostProjectRegistrationServiceAsyncClient, HostProjectRegistrationServiceClient, ) -from .services.linting_service import LintingServiceAsyncClient, LintingServiceClient -from .services.provisioning import ProvisioningAsyncClient, ProvisioningClient +from .services.linting_service import LintingServiceClient +from .services.provisioning import ProvisioningClient from .services.runtime_project_attachment_service import ( - RuntimeProjectAttachmentServiceAsyncClient, RuntimeProjectAttachmentServiceClient, ) from .types.apihub_service import ( @@ -158,13 +153,6 @@ ) __all__ = ( - "ApiHubAsyncClient", - "ApiHubDependenciesAsyncClient", - "ApiHubPluginAsyncClient", - "HostProjectRegistrationServiceAsyncClient", - "LintingServiceAsyncClient", - "ProvisioningAsyncClient", - "RuntimeProjectAttachmentServiceAsyncClient", "Api", "ApiHubClient", "ApiHubDependenciesClient", diff --git 
a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json index 079807dd2715..1585fa5ee448 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json @@ -7,7 +7,7 @@ "services": { "ApiHub": { "clients": { - "grpc": { + "rest": { "libraryClient": "ApiHubClient", "rpcs": { "CreateApi": { @@ -186,728 +186,138 @@ ] } } - }, - "grpc-async": { - "libraryClient": "ApiHubAsyncClient", + } + } + }, + "ApiHubDependencies": { + "clients": { + "rest": { + "libraryClient": "ApiHubDependenciesClient", "rpcs": { - "CreateApi": { - "methods": [ - "create_api" - ] - }, - "CreateAttribute": { - "methods": [ - "create_attribute" - ] - }, - "CreateDeployment": { - "methods": [ - "create_deployment" - ] - }, - "CreateExternalApi": { - "methods": [ - "create_external_api" - ] - }, - "CreateSpec": { - "methods": [ - "create_spec" - ] - }, - "CreateVersion": { - "methods": [ - "create_version" - ] - }, - "DeleteApi": { - "methods": [ - "delete_api" - ] - }, - "DeleteAttribute": { - "methods": [ - "delete_attribute" - ] - }, - "DeleteDeployment": { - "methods": [ - "delete_deployment" - ] - }, - "DeleteExternalApi": { - "methods": [ - "delete_external_api" - ] - }, - "DeleteSpec": { - "methods": [ - "delete_spec" - ] - }, - "DeleteVersion": { - "methods": [ - "delete_version" - ] - }, - "GetApi": { - "methods": [ - "get_api" - ] - }, - "GetApiOperation": { - "methods": [ - "get_api_operation" - ] - }, - "GetAttribute": { - "methods": [ - "get_attribute" - ] - }, - "GetDefinition": { - "methods": [ - "get_definition" - ] - }, - "GetDeployment": { - "methods": [ - "get_deployment" - ] - }, - "GetExternalApi": { - "methods": [ - "get_external_api" - ] - }, - "GetSpec": { - "methods": [ - "get_spec" - ] - }, - "GetSpecContents": { - "methods": [ - "get_spec_contents" - ] - }, - "GetVersion": { - "methods": [ - "get_version" - ] - }, - "ListApiOperations": { - "methods": [ - "list_api_operations" - ] - }, - "ListApis": { - "methods": [ - "list_apis" - ] - }, - "ListAttributes": { - "methods": [ - "list_attributes" - ] - }, - "ListDeployments": { - "methods": [ - "list_deployments" - ] - }, - "ListExternalApis": { - "methods": [ - "list_external_apis" - ] - }, - "ListSpecs": { - "methods": [ - "list_specs" - ] - }, - "ListVersions": { + "CreateDependency": { "methods": [ - "list_versions" + "create_dependency" ] }, - "SearchResources": { + "DeleteDependency": { "methods": [ - "search_resources" + "delete_dependency" ] }, - "UpdateApi": { + "GetDependency": { "methods": [ - "update_api" + "get_dependency" ] }, - "UpdateAttribute": { + "ListDependencies": { "methods": [ - "update_attribute" + "list_dependencies" ] }, - "UpdateDeployment": { + "UpdateDependency": { "methods": [ - "update_deployment" + "update_dependency" ] - }, - "UpdateExternalApi": { + } + } + } + } + }, + "ApiHubPlugin": { + "clients": { + "rest": { + "libraryClient": "ApiHubPluginClient", + "rpcs": { + "DisablePlugin": { "methods": [ - "update_external_api" + "disable_plugin" ] }, - "UpdateSpec": { + "EnablePlugin": { "methods": [ - "update_spec" + "enable_plugin" ] }, - "UpdateVersion": { + "GetPlugin": { "methods": [ - "update_version" + "get_plugin" ] } } - }, + } + } + }, + "HostProjectRegistrationService": { + "clients": { "rest": { - "libraryClient": "ApiHubClient", + "libraryClient": "HostProjectRegistrationServiceClient", "rpcs": { - "CreateApi": { + 
"CreateHostProjectRegistration": { "methods": [ - "create_api" + "create_host_project_registration" ] }, - "CreateAttribute": { + "GetHostProjectRegistration": { "methods": [ - "create_attribute" + "get_host_project_registration" ] }, - "CreateDeployment": { + "ListHostProjectRegistrations": { "methods": [ - "create_deployment" + "list_host_project_registrations" ] - }, - "CreateExternalApi": { + } + } + } + } + }, + "LintingService": { + "clients": { + "rest": { + "libraryClient": "LintingServiceClient", + "rpcs": { + "GetStyleGuide": { "methods": [ - "create_external_api" + "get_style_guide" ] }, - "CreateSpec": { + "GetStyleGuideContents": { "methods": [ - "create_spec" + "get_style_guide_contents" ] }, - "CreateVersion": { + "LintSpec": { "methods": [ - "create_version" + "lint_spec" ] }, - "DeleteApi": { + "UpdateStyleGuide": { "methods": [ - "delete_api" + "update_style_guide" ] - }, - "DeleteAttribute": { + } + } + } + } + }, + "Provisioning": { + "clients": { + "rest": { + "libraryClient": "ProvisioningClient", + "rpcs": { + "CreateApiHubInstance": { "methods": [ - "delete_attribute" + "create_api_hub_instance" ] }, - "DeleteDeployment": { + "GetApiHubInstance": { "methods": [ - "delete_deployment" + "get_api_hub_instance" ] }, - "DeleteExternalApi": { + "LookupApiHubInstance": { "methods": [ - "delete_external_api" - ] - }, - "DeleteSpec": { - "methods": [ - "delete_spec" - ] - }, - "DeleteVersion": { - "methods": [ - "delete_version" - ] - }, - "GetApi": { - "methods": [ - "get_api" - ] - }, - "GetApiOperation": { - "methods": [ - "get_api_operation" - ] - }, - "GetAttribute": { - "methods": [ - "get_attribute" - ] - }, - "GetDefinition": { - "methods": [ - "get_definition" - ] - }, - "GetDeployment": { - "methods": [ - "get_deployment" - ] - }, - "GetExternalApi": { - "methods": [ - "get_external_api" - ] - }, - "GetSpec": { - "methods": [ - "get_spec" - ] - }, - "GetSpecContents": { - "methods": [ - "get_spec_contents" - ] - }, - "GetVersion": { - "methods": [ - "get_version" - ] - }, - "ListApiOperations": { - "methods": [ - "list_api_operations" - ] - }, - "ListApis": { - "methods": [ - "list_apis" - ] - }, - "ListAttributes": { - "methods": [ - "list_attributes" - ] - }, - "ListDeployments": { - "methods": [ - "list_deployments" - ] - }, - "ListExternalApis": { - "methods": [ - "list_external_apis" - ] - }, - "ListSpecs": { - "methods": [ - "list_specs" - ] - }, - "ListVersions": { - "methods": [ - "list_versions" - ] - }, - "SearchResources": { - "methods": [ - "search_resources" - ] - }, - "UpdateApi": { - "methods": [ - "update_api" - ] - }, - "UpdateAttribute": { - "methods": [ - "update_attribute" - ] - }, - "UpdateDeployment": { - "methods": [ - "update_deployment" - ] - }, - "UpdateExternalApi": { - "methods": [ - "update_external_api" - ] - }, - "UpdateSpec": { - "methods": [ - "update_spec" - ] - }, - "UpdateVersion": { - "methods": [ - "update_version" - ] - } - } - } - } - }, - "ApiHubDependencies": { - "clients": { - "grpc": { - "libraryClient": "ApiHubDependenciesClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ApiHubDependenciesAsyncClient", - "rpcs": { - "CreateDependency": { - "methods": [ - 
"create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - }, - "rest": { - "libraryClient": "ApiHubDependenciesClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - } - } - }, - "ApiHubPlugin": { - "clients": { - "grpc": { - "libraryClient": "ApiHubPluginClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ApiHubPluginAsyncClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - }, - "rest": { - "libraryClient": "ApiHubPluginClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - } - } - }, - "HostProjectRegistrationService": { - "clients": { - "grpc": { - "libraryClient": "HostProjectRegistrationServiceClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - }, - "grpc-async": { - "libraryClient": "HostProjectRegistrationServiceAsyncClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - }, - "rest": { - "libraryClient": "HostProjectRegistrationServiceClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - } - } - }, - "LintingService": { - "clients": { - "grpc": { - "libraryClient": "LintingServiceClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - }, - "grpc-async": { - "libraryClient": "LintingServiceAsyncClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - }, - "rest": { - "libraryClient": 
"LintingServiceClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - } - } - }, - "Provisioning": { - "clients": { - "grpc": { - "libraryClient": "ProvisioningClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ProvisioningAsyncClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" - ] - } - } - }, - "rest": { - "libraryClient": "ProvisioningClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" + "lookup_api_hub_instance" ] } } @@ -916,66 +326,6 @@ }, "RuntimeProjectAttachmentService": { "clients": { - "grpc": { - "libraryClient": "RuntimeProjectAttachmentServiceClient", - "rpcs": { - "CreateRuntimeProjectAttachment": { - "methods": [ - "create_runtime_project_attachment" - ] - }, - "DeleteRuntimeProjectAttachment": { - "methods": [ - "delete_runtime_project_attachment" - ] - }, - "GetRuntimeProjectAttachment": { - "methods": [ - "get_runtime_project_attachment" - ] - }, - "ListRuntimeProjectAttachments": { - "methods": [ - "list_runtime_project_attachments" - ] - }, - "LookupRuntimeProjectAttachment": { - "methods": [ - "lookup_runtime_project_attachment" - ] - } - } - }, - "grpc-async": { - "libraryClient": "RuntimeProjectAttachmentServiceAsyncClient", - "rpcs": { - "CreateRuntimeProjectAttachment": { - "methods": [ - "create_runtime_project_attachment" - ] - }, - "DeleteRuntimeProjectAttachment": { - "methods": [ - "delete_runtime_project_attachment" - ] - }, - "GetRuntimeProjectAttachment": { - "methods": [ - "get_runtime_project_attachment" - ] - }, - "ListRuntimeProjectAttachments": { - "methods": [ - "list_runtime_project_attachments" - ] - }, - "LookupRuntimeProjectAttachment": { - "methods": [ - "lookup_runtime_project_attachment" - ] - } - } - }, "rest": { "libraryClient": "RuntimeProjectAttachmentServiceClient", "rpcs": { diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py index 8da758214ef9..0d50a3548806 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubAsyncClient from .client import ApiHubClient -__all__ = ( - "ApiHubClient", - "ApiHubAsyncClient", -) +__all__ = ("ApiHubClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py index 69f73e4792ec..77ddc5472962 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import apihub_service, common_fields from .transports.base import DEFAULT_CLIENT_INFO, ApiHubTransport -from .transports.grpc import ApiHubGrpcTransport -from .transports.grpc_asyncio import ApiHubGrpcAsyncIOTransport from .transports.rest import ApiHubRestTransport @@ -71,8 +69,6 @@ class ApiHubClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] - _transport_registry["grpc"] = ApiHubGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py index e5b37bd42192..510192a2f321 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py @@ -115,84 +115,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListApisAsyncPager: - """A pager for iterating through ``list_apis`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListApisResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``apis`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListApis`` requests and continue to iterate - through the ``apis`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListApisResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListApisResponse]], - request: apihub_service.ListApisRequest, - response: apihub_service.ListApisResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListApisRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListApisResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListApisRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListApisResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Api]: - async def async_generator(): - async for page in self.pages: - for response in page.apis: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListVersionsPager: """A pager for iterating through ``list_versions`` requests. @@ -267,84 +189,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListVersionsAsyncPager: - """A pager for iterating through ``list_versions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListVersionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``versions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListVersions`` requests and continue to iterate - through the ``versions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListVersionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListVersionsResponse]], - request: apihub_service.ListVersionsRequest, - response: apihub_service.ListVersionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListVersionsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListVersionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListVersionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListVersionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Version]: - async def async_generator(): - async for page in self.pages: - for response in page.versions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListSpecsPager: """A pager for iterating through ``list_specs`` requests. @@ -419,84 +263,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListSpecsAsyncPager: - """A pager for iterating through ``list_specs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListSpecsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``specs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSpecs`` requests and continue to iterate - through the ``specs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListSpecsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListSpecsResponse]], - request: apihub_service.ListSpecsRequest, - response: apihub_service.ListSpecsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListSpecsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListSpecsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListSpecsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListSpecsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Spec]: - async def async_generator(): - async for page in self.pages: - for response in page.specs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListApiOperationsPager: """A pager for iterating through ``list_api_operations`` requests. @@ -571,84 +337,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListApiOperationsAsyncPager: - """A pager for iterating through ``list_api_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``api_operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListApiOperations`` requests and continue to iterate - through the ``api_operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListApiOperationsResponse]], - request: apihub_service.ListApiOperationsRequest, - response: apihub_service.ListApiOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListApiOperationsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListApiOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListApiOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListApiOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.ApiOperation]: - async def async_generator(): - async for page in self.pages: - for response in page.api_operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListDeploymentsPager: """A pager for iterating through ``list_deployments`` requests. @@ -723,84 +411,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListDeploymentsAsyncPager: - """A pager for iterating through ``list_deployments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deployments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeployments`` requests and continue to iterate - through the ``deployments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListDeploymentsResponse]], - request: apihub_service.ListDeploymentsRequest, - response: apihub_service.ListDeploymentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListDeploymentsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListDeploymentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListDeploymentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListDeploymentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Deployment]: - async def async_generator(): - async for page in self.pages: - for response in page.deployments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListAttributesPager: """A pager for iterating through ``list_attributes`` requests. @@ -875,84 +485,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListAttributesAsyncPager: - """A pager for iterating through ``list_attributes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListAttributesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``attributes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAttributes`` requests and continue to iterate - through the ``attributes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListAttributesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListAttributesResponse]], - request: apihub_service.ListAttributesRequest, - response: apihub_service.ListAttributesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListAttributesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListAttributesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListAttributesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListAttributesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Attribute]: - async def async_generator(): - async for page in self.pages: - for response in page.attributes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class SearchResourcesPager: """A pager for iterating through ``search_resources`` requests. @@ -1027,84 +559,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class SearchResourcesAsyncPager: - """A pager for iterating through ``search_resources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``search_results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchResources`` requests and continue to iterate - through the ``search_results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.SearchResourcesResponse]], - request: apihub_service.SearchResourcesRequest, - response: apihub_service.SearchResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.SearchResourcesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.SearchResourcesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.SearchResourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.SearchResourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[apihub_service.SearchResult]: - async def async_generator(): - async for page in self.pages: - for response in page.search_results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListExternalApisPager: """A pager for iterating through ``list_external_apis`` requests. @@ -1177,81 +631,3 @@ def __iter__(self) -> Iterator[common_fields.ExternalApi]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListExternalApisAsyncPager: - """A pager for iterating through ``list_external_apis`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``external_apis`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListExternalApis`` requests and continue to iterate - through the ``external_apis`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListExternalApisResponse]], - request: apihub_service.ListExternalApisRequest, - response: apihub_service.ListExternalApisResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListExternalApisRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListExternalApisResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListExternalApisRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListExternalApisResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.ExternalApi]: - async def async_generator(): - async for page in self.pages: - for response in page.external_apis: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py index ae6fa9e02afe..904125024a7b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ApiHubTransport -from .grpc import ApiHubGrpcTransport -from .grpc_asyncio import ApiHubGrpcAsyncIOTransport from .rest import ApiHubRestInterceptor, ApiHubRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] -_transport_registry["grpc"] = ApiHubGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubRestTransport __all__ = ( "ApiHubTransport", - "ApiHubGrpcTransport", - "ApiHubGrpcAsyncIOTransport", "ApiHubRestTransport", "ApiHubRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py index 9727d7d5b0d7..146b28fe4729 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubDependenciesAsyncClient from .client import ApiHubDependenciesClient -__all__ = ( - "ApiHubDependenciesClient", - "ApiHubDependenciesAsyncClient", -) +__all__ = ("ApiHubDependenciesClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py index 1c70a2416c8e..70a952fe282c 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import apihub_service, common_fields from .transports.base import DEFAULT_CLIENT_INFO, ApiHubDependenciesTransport -from .transports.grpc import ApiHubDependenciesGrpcTransport -from .transports.grpc_asyncio import ApiHubDependenciesGrpcAsyncIOTransport from .transports.rest import ApiHubDependenciesRestTransport @@ -73,8 +71,6 @@ class ApiHubDependenciesClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[ApiHubDependenciesTransport]] - _transport_registry["grpc"] = ApiHubDependenciesGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubDependenciesGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubDependenciesRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py index 89cdfff15348..6f0fa634f84d 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py @@ -113,81 +113,3 @@ def __iter__(self) -> Iterator[common_fields.Dependency]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListDependenciesAsyncPager: - """A pager for iterating through ``list_dependencies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``dependencies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDependencies`` requests and continue to iterate - through the ``dependencies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListDependenciesResponse]], - request: apihub_service.ListDependenciesRequest, - response: apihub_service.ListDependenciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListDependenciesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListDependenciesResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListDependenciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListDependenciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Dependency]: - async def async_generator(): - async for page in self.pages: - for response in page.dependencies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py index 8327d8408a2e..5de2b44a3808 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py @@ -17,22 +17,16 @@ from typing import Dict, Type from .base import ApiHubDependenciesTransport -from .grpc import ApiHubDependenciesGrpcTransport -from .grpc_asyncio import ApiHubDependenciesGrpcAsyncIOTransport from .rest import ApiHubDependenciesRestInterceptor, ApiHubDependenciesRestTransport # Compile a registry of transports. _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[ApiHubDependenciesTransport]] -_transport_registry["grpc"] = ApiHubDependenciesGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubDependenciesGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubDependenciesRestTransport __all__ = ( "ApiHubDependenciesTransport", - "ApiHubDependenciesGrpcTransport", - "ApiHubDependenciesGrpcAsyncIOTransport", "ApiHubDependenciesRestTransport", "ApiHubDependenciesRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py index 3d883e94c9c6..5cbea89992b0 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubPluginAsyncClient from .client import ApiHubPluginClient -__all__ = ( - "ApiHubPluginClient", - "ApiHubPluginAsyncClient", -) +__all__ = ("ApiHubPluginClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py index 5f6283c74876..dbfedb9a41d0 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py @@ -54,8 +54,6 @@ from google.cloud.apihub_v1.types import common_fields, plugin_service from .transports.base import DEFAULT_CLIENT_INFO, ApiHubPluginTransport -from .transports.grpc import ApiHubPluginGrpcTransport -from .transports.grpc_asyncio import ApiHubPluginGrpcAsyncIOTransport from .transports.rest import ApiHubPluginRestTransport @@ -68,8 +66,6 @@ class ApiHubPluginClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] - _transport_registry["grpc"] = ApiHubPluginGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubPluginGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubPluginRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py index 33a3043c2375..9ecb3eaee613 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ApiHubPluginTransport -from .grpc import ApiHubPluginGrpcTransport -from .grpc_asyncio import ApiHubPluginGrpcAsyncIOTransport from .rest import ApiHubPluginRestInterceptor, ApiHubPluginRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] -_transport_registry["grpc"] = ApiHubPluginGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubPluginGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubPluginRestTransport __all__ = ( "ApiHubPluginTransport", - "ApiHubPluginGrpcTransport", - "ApiHubPluginGrpcAsyncIOTransport", "ApiHubPluginRestTransport", "ApiHubPluginRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py index f08d02f0c7b6..f5f90e47cdb9 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import HostProjectRegistrationServiceAsyncClient from .client import HostProjectRegistrationServiceClient -__all__ = ( - "HostProjectRegistrationServiceClient", - "HostProjectRegistrationServiceAsyncClient", -) +__all__ = ("HostProjectRegistrationServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py index 89784ae2dd03..2e5897cdad20 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py @@ -59,8 +59,6 @@ DEFAULT_CLIENT_INFO, HostProjectRegistrationServiceTransport, ) -from .transports.grpc import HostProjectRegistrationServiceGrpcTransport -from .transports.grpc_asyncio import HostProjectRegistrationServiceGrpcAsyncIOTransport from .transports.rest import HostProjectRegistrationServiceRestTransport @@ -75,10 +73,6 @@ class HostProjectRegistrationServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] - _transport_registry["grpc"] = HostProjectRegistrationServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = HostProjectRegistrationServiceGrpcAsyncIOTransport _transport_registry["rest"] = HostProjectRegistrationServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py index 2248115ec790..4bb7e2ec7541 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py @@ -125,96 +125,3 @@ def __iter__( def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListHostProjectRegistrationsAsyncPager: - """A pager for iterating through ``list_host_project_registrations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``host_project_registrations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListHostProjectRegistrations`` requests and continue to iterate - through the ``host_project_registrations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[ - ..., - Awaitable[ - host_project_registration_service.ListHostProjectRegistrationsResponse - ], - ], - request: host_project_registration_service.ListHostProjectRegistrationsRequest, - response: host_project_registration_service.ListHostProjectRegistrationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest( - request - ) - ) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages( - self, - ) -> AsyncIterator[ - host_project_registration_service.ListHostProjectRegistrationsResponse - ]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__( - self, - ) -> AsyncIterator[host_project_registration_service.HostProjectRegistration]: - async def async_generator(): - async for page in self.pages: - for response in page.host_project_registrations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py index 2352c478fef0..c80657406ff6 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py @@ -17,8 +17,6 @@ from typing import Dict, Type from .base import HostProjectRegistrationServiceTransport -from .grpc import HostProjectRegistrationServiceGrpcTransport -from .grpc_asyncio import HostProjectRegistrationServiceGrpcAsyncIOTransport from .rest import ( HostProjectRegistrationServiceRestInterceptor, HostProjectRegistrationServiceRestTransport, @@ -28,14 +26,10 @@ _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] -_transport_registry["grpc"] = HostProjectRegistrationServiceGrpcTransport -_transport_registry["grpc_asyncio"] = HostProjectRegistrationServiceGrpcAsyncIOTransport _transport_registry["rest"] = HostProjectRegistrationServiceRestTransport __all__ = ( "HostProjectRegistrationServiceTransport", - "HostProjectRegistrationServiceGrpcTransport", - "HostProjectRegistrationServiceGrpcAsyncIOTransport", "HostProjectRegistrationServiceRestTransport", "HostProjectRegistrationServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py index 70436549c9b7..68f5fe54993b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py +++ 
b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .async_client import LintingServiceAsyncClient from .client import LintingServiceClient -__all__ = ( - "LintingServiceClient", - "LintingServiceAsyncClient", -) +__all__ = ("LintingServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py index 75930326f9d9..608153448c3b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py @@ -55,8 +55,6 @@ from google.cloud.apihub_v1.types import common_fields, linting_service from .transports.base import DEFAULT_CLIENT_INFO, LintingServiceTransport -from .transports.grpc import LintingServiceGrpcTransport -from .transports.grpc_asyncio import LintingServiceGrpcAsyncIOTransport from .transports.rest import LintingServiceRestTransport @@ -71,8 +69,6 @@ class LintingServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[LintingServiceTransport]] - _transport_registry["grpc"] = LintingServiceGrpcTransport - _transport_registry["grpc_asyncio"] = LintingServiceGrpcAsyncIOTransport _transport_registry["rest"] = LintingServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py index d9f3131d4481..f8d2f54aac8c 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import LintingServiceTransport -from .grpc import LintingServiceGrpcTransport -from .grpc_asyncio import LintingServiceGrpcAsyncIOTransport from .rest import LintingServiceRestInterceptor, LintingServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LintingServiceTransport]] -_transport_registry["grpc"] = LintingServiceGrpcTransport -_transport_registry["grpc_asyncio"] = LintingServiceGrpcAsyncIOTransport _transport_registry["rest"] = LintingServiceRestTransport __all__ = ( "LintingServiceTransport", - "LintingServiceGrpcTransport", - "LintingServiceGrpcAsyncIOTransport", "LintingServiceRestTransport", "LintingServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py index 894b56bb9b82..3df245148ed6 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ProvisioningAsyncClient from .client import ProvisioningClient -__all__ = ( - "ProvisioningClient", - "ProvisioningAsyncClient", -) +__all__ = ("ProvisioningClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py index c39738ccb878..56a83e91bd00 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import common_fields, provisioning_service from .transports.base import DEFAULT_CLIENT_INFO, ProvisioningTransport -from .transports.grpc import ProvisioningGrpcTransport -from .transports.grpc_asyncio import ProvisioningGrpcAsyncIOTransport from .transports.rest import ProvisioningRestTransport @@ -71,8 +69,6 @@ class ProvisioningClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] - _transport_registry["grpc"] = ProvisioningGrpcTransport - _transport_registry["grpc_asyncio"] = ProvisioningGrpcAsyncIOTransport _transport_registry["rest"] = ProvisioningRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py index a1a997220440..c82beafe4a3e 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ProvisioningTransport -from .grpc import ProvisioningGrpcTransport -from .grpc_asyncio import ProvisioningGrpcAsyncIOTransport from .rest import ProvisioningRestInterceptor, ProvisioningRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] -_transport_registry["grpc"] = ProvisioningGrpcTransport -_transport_registry["grpc_asyncio"] = ProvisioningGrpcAsyncIOTransport _transport_registry["rest"] = ProvisioningRestTransport __all__ = ( "ProvisioningTransport", - "ProvisioningGrpcTransport", - "ProvisioningGrpcAsyncIOTransport", "ProvisioningRestTransport", "ProvisioningRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py index 53b21be76ec7..28875a7e7af2 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import RuntimeProjectAttachmentServiceAsyncClient from .client import RuntimeProjectAttachmentServiceClient -__all__ = ( - "RuntimeProjectAttachmentServiceClient", - "RuntimeProjectAttachmentServiceAsyncClient", -) +__all__ = ("RuntimeProjectAttachmentServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py index 121ce0bf5470..dc9c1039381a 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py @@ -59,8 +59,6 @@ DEFAULT_CLIENT_INFO, RuntimeProjectAttachmentServiceTransport, ) -from .transports.grpc import RuntimeProjectAttachmentServiceGrpcTransport -from .transports.grpc_asyncio import RuntimeProjectAttachmentServiceGrpcAsyncIOTransport from .transports.rest import RuntimeProjectAttachmentServiceRestTransport @@ -75,10 +73,6 @@ class RuntimeProjectAttachmentServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] - _transport_registry["grpc"] = RuntimeProjectAttachmentServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = RuntimeProjectAttachmentServiceGrpcAsyncIOTransport _transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py index 05ba311342e2..7e63e765df51 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py @@ -126,96 +126,3 @@ def __iter__( def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListRuntimeProjectAttachmentsAsyncPager: - """A pager for iterating through ``list_runtime_project_attachments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``runtime_project_attachments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListRuntimeProjectAttachments`` requests and continue to iterate - through the ``runtime_project_attachments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[ - ..., - Awaitable[ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse - ], - ], - request: runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, - response: runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - request - ) - ) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages( - self, - ) -> AsyncIterator[ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse - ]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__( - self, - ) -> AsyncIterator[runtime_project_attachment_service.RuntimeProjectAttachment]: - async def async_generator(): - async for page in self.pages: - for response in page.runtime_project_attachments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py index e7fe76d5503e..604d33074e46 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py @@ -17,8 +17,6 @@ from typing import Dict, Type from .base import RuntimeProjectAttachmentServiceTransport -from .grpc import RuntimeProjectAttachmentServiceGrpcTransport -from .grpc_asyncio import RuntimeProjectAttachmentServiceGrpcAsyncIOTransport from .rest import ( RuntimeProjectAttachmentServiceRestInterceptor, RuntimeProjectAttachmentServiceRestTransport, @@ -28,16 +26,10 @@ _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] -_transport_registry["grpc"] = RuntimeProjectAttachmentServiceGrpcTransport -_transport_registry[ - "grpc_asyncio" -] = RuntimeProjectAttachmentServiceGrpcAsyncIOTransport _transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport __all__ = ( "RuntimeProjectAttachmentServiceTransport", - "RuntimeProjectAttachmentServiceGrpcTransport", - "RuntimeProjectAttachmentServiceGrpcAsyncIOTransport", "RuntimeProjectAttachmentServiceRestTransport", "RuntimeProjectAttachmentServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py deleted file mode 100644 index 433fc9066d3e..000000000000 --- 
a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - api = apihub_v1.Api() - api.display_name = "display_name_value" - - request = apihub_v1.CreateApiRequest( - parent="parent_value", - api=api, - ) - - # Make the request - response = await client.create_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py deleted file mode 100644 index 34242e5e4a77..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - attribute = apihub_v1.Attribute() - attribute.display_name = "display_name_value" - attribute.scope = "PLUGIN" - attribute.data_type = "STRING" - - request = apihub_v1.CreateAttributeRequest( - parent="parent_value", - attribute=attribute, - ) - - # Make the request - response = await client.create_attribute(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py deleted file mode 100644 index 764524b6c030..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - deployment = apihub_v1.Deployment() - deployment.display_name = "display_name_value" - deployment.deployment_type.enum_values.values.id = "id_value" - deployment.deployment_type.enum_values.values.display_name = "display_name_value" - deployment.resource_uri = "resource_uri_value" - deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] - - request = apihub_v1.CreateDeploymentRequest( - parent="parent_value", - deployment=deployment, - ) - - # Make the request - response = await client.create_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py deleted file mode 100644 index a9cff017ea20..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - external_api = apihub_v1.ExternalApi() - external_api.display_name = "display_name_value" - - request = apihub_v1.CreateExternalApiRequest( - parent="parent_value", - external_api=external_api, - ) - - # Make the request - response = await client.create_external_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py deleted file mode 100644 index 90481d22eee0..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - spec = apihub_v1.Spec() - spec.display_name = "display_name_value" - spec.spec_type.enum_values.values.id = "id_value" - spec.spec_type.enum_values.values.display_name = "display_name_value" - - request = apihub_v1.CreateSpecRequest( - parent="parent_value", - spec=spec, - ) - - # Make the request - response = await client.create_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py deleted file mode 100644 index 3ba50ef85c3e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_CreateVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - version = apihub_v1.Version() - version.display_name = "display_name_value" - - request = apihub_v1.CreateVersionRequest( - parent="parent_value", - version=version, - ) - - # Make the request - response = await client.create_version(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_CreateVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py deleted file mode 100644 index 9dcedc8d971f..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteApiRequest( - name="name_value", - ) - - # Make the request - await client.delete_api(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py deleted file mode 100644 index 3c139efd73c2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteAttributeRequest( - name="name_value", - ) - - # Make the request - await client.delete_attribute(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py deleted file mode 100644 index c187acdcc75e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteDeploymentRequest( - name="name_value", - ) - - # Make the request - await client.delete_deployment(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py deleted file mode 100644 index ee72bfdb2cb5..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteExternalApiRequest( - name="name_value", - ) - - # Make the request - await client.delete_external_api(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py deleted file mode 100644 index 18bf9aa527da..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteSpecRequest( - name="name_value", - ) - - # Make the request - await client.delete_spec(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py deleted file mode 100644 index a84532d3859c..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteVersionRequest( - name="name_value", - ) - - # Make the request - await client.delete_version(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py deleted file mode 100644 index bc37ee587379..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_CreateDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - dependency = apihub_v1.Dependency() - dependency.consumer.operation_resource_name = "operation_resource_name_value" - dependency.supplier.operation_resource_name = "operation_resource_name_value" - - request = apihub_v1.CreateDependencyRequest( - parent="parent_value", - dependency=dependency, - ) - - # Make the request - response = await client.create_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_CreateDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py deleted file mode 100644 index af78f08fa7cd..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_DeleteDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteDependencyRequest( - name="name_value", - ) - - # Make the request - await client.delete_dependency(request=request) - - -# [END apihub_v1_generated_ApiHubDependencies_DeleteDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py deleted file mode 100644 index 1ef59fb63cc2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_GetDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDependencyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_GetDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py deleted file mode 100644 index cbac9ee03c0e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDependencies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_ListDependencies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_dependencies(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListDependenciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dependencies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_ListDependencies_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py deleted file mode 100644 index 37b5b624db53..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_UpdateDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - dependency = apihub_v1.Dependency() - dependency.consumer.operation_resource_name = "operation_resource_name_value" - dependency.supplier.operation_resource_name = "operation_resource_name_value" - - request = apihub_v1.UpdateDependencyRequest( - dependency=dependency, - ) - - # Make the request - response = await client.update_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_UpdateDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py deleted file mode 100644 index d9a93e6ec082..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetApiRequest( - name="name_value", - ) - - # Make the request - response = await client.get_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py deleted file mode 100644 index 884fa8a495b2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetApiOperation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetApiOperation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_api_operation(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetApiOperationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_api_operation(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetApiOperation_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py deleted file mode 100644 index 8de6d7766e83..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetAttributeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_attribute(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py deleted file mode 100644 index b1b62ba74864..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDefinition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetDefinition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_definition(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDefinitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_definition(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetDefinition_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py deleted file mode 100644 index a16639adc526..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDeploymentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py deleted file mode 100644 index 2e44849e8c40..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetExternalApiRequest( - name="name_value", - ) - - # Make the request - response = await client.get_external_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py deleted file mode 100644 index 5679acd29ccd..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py deleted file mode 100644 index ffaf9eaa238b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSpecContents -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetSpecContents_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_spec_contents(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetSpecContentsRequest( - name="name_value", - ) - - # Make the request - response = await client.get_spec_contents(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetSpecContents_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py deleted file mode 100644 index 2c7b1d375fee..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetVersionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_version(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py deleted file mode 100644 index 341caf033a8a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListApiOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListApiOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_api_operations(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListApiOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_api_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListApiOperations_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py deleted file mode 100644 index 5130bd9cbca6..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListApis -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListApis_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_apis(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListApisRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_apis(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListApis_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py deleted file mode 100644 index 9dd769c3831b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAttributes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListAttributes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_attributes(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListAttributesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_attributes(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListAttributes_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py deleted file mode 100644 index 7d96c577933f..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeployments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListDeployments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_deployments(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListDeploymentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deployments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListDeployments_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py deleted file mode 100644 index d45eefcb5fa6..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListExternalApis -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListExternalApis_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_external_apis(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListExternalApisRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_external_apis(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListExternalApis_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py deleted file mode 100644 index 33e365b5ebaa..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSpecs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListSpecs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_specs(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListSpecsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_specs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListSpecs_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py deleted file mode 100644 index 795e376e27cf..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListVersions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListVersions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_versions(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListVersionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_versions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListVersions_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py deleted file mode 100644 index 5c1773b84f6a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DisablePlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_DisablePlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_disable_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DisablePluginRequest( - name="name_value", - ) - - # Make the request - response = await client.disable_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_DisablePlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py deleted file mode 100644 index c2dbee38cd0b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EnablePlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_EnablePlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_enable_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.EnablePluginRequest( - name="name_value", - ) - - # Make the request - response = await client.enable_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_EnablePlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py deleted file mode 100644 index 8ced7daffafe..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_GetPlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetPluginRequest( - name="name_value", - ) - - # Make the request - response = await client.get_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_GetPlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py deleted file mode 100644 index 538e2cee6af0..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_SearchResources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_search_resources(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.SearchResourcesRequest( - location="location_value", - query="query_value", - ) - - # Make the request - page_result = client.search_resources(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_SearchResources_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py deleted file mode 100644 index 4ba551de8127..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - api = apihub_v1.Api() - api.display_name = "display_name_value" - - request = apihub_v1.UpdateApiRequest( - api=api, - ) - - # Make the request - response = await client.update_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py deleted file mode 100644 index b0583bcadadb..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - attribute = apihub_v1.Attribute() - attribute.display_name = "display_name_value" - attribute.scope = "PLUGIN" - attribute.data_type = "STRING" - - request = apihub_v1.UpdateAttributeRequest( - attribute=attribute, - ) - - # Make the request - response = await client.update_attribute(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py deleted file mode 100644 index 2624bd2d844a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - deployment = apihub_v1.Deployment() - deployment.display_name = "display_name_value" - deployment.deployment_type.enum_values.values.id = "id_value" - deployment.deployment_type.enum_values.values.display_name = "display_name_value" - deployment.resource_uri = "resource_uri_value" - deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] - - request = apihub_v1.UpdateDeploymentRequest( - deployment=deployment, - ) - - # Make the request - response = await client.update_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py deleted file mode 100644 index d4eb7321ecf4..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - external_api = apihub_v1.ExternalApi() - external_api.display_name = "display_name_value" - - request = apihub_v1.UpdateExternalApiRequest( - external_api=external_api, - ) - - # Make the request - response = await client.update_external_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py deleted file mode 100644 index 62e5c6bef8c2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - spec = apihub_v1.Spec() - spec.display_name = "display_name_value" - spec.spec_type.enum_values.values.id = "id_value" - spec.spec_type.enum_values.values.display_name = "display_name_value" - - request = apihub_v1.UpdateSpecRequest( - spec=spec, - ) - - # Make the request - response = await client.update_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py deleted file mode 100644 index 37369a1057bc..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - version = apihub_v1.Version() - version.display_name = "display_name_value" - - request = apihub_v1.UpdateVersionRequest( - version=version, - ) - - # Make the request - response = await client.update_version(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py deleted file mode 100644 index 1f3b26540e0c..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateHostProjectRegistration -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_host_project_registration(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - host_project_registration = apihub_v1.HostProjectRegistration() - host_project_registration.gcp_project = "gcp_project_value" - - request = apihub_v1.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - host_project_registration=host_project_registration, - ) - - # Make the request - response = await client.create_host_project_registration(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py deleted file mode 100644 index b48f7c69b98c..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetHostProjectRegistration -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_host_project_registration(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetHostProjectRegistrationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_host_project_registration(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py deleted file mode 100644 index 0796d381861a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListHostProjectRegistrations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_host_project_registrations(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListHostProjectRegistrationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_host_project_registrations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py deleted file mode 100644 index d6a18ab4d88e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStyleGuide -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_GetStyleGuide_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_style_guide(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetStyleGuideRequest( - name="name_value", - ) - - # Make the request - response = await client.get_style_guide(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_LintingService_GetStyleGuide_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py deleted file mode 100644 index 9463dc7ebbfd..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStyleGuideContents -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_GetStyleGuideContents_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_style_guide_contents(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetStyleGuideContentsRequest( - name="name_value", - ) - - # Make the request - response = await client.get_style_guide_contents(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_LintingService_GetStyleGuideContents_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py deleted file mode 100644 index 03effe93eca4..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LintSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_LintSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lint_spec(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LintSpecRequest( - name="name_value", - ) - - # Make the request - await client.lint_spec(request=request) - - -# [END apihub_v1_generated_LintingService_LintSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py deleted file mode 100644 index 3b819e06d8ba..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStyleGuide -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_UpdateStyleGuide_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_style_guide(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - style_guide = apihub_v1.StyleGuide() - style_guide.linter = "OTHER" - style_guide.contents.contents = b'contents_blob' - style_guide.contents.mime_type = "mime_type_value" - - request = apihub_v1.UpdateStyleGuideRequest( - style_guide=style_guide, - ) - - # Make the request - response = await client.update_style_guide(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_LintingService_UpdateStyleGuide_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py deleted file mode 100644 index abb888a77b21..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateApiHubInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_Provisioning_CreateApiHubInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. 
-# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_api_hub_instance(): - # Create a client - client = apihub_v1.ProvisioningAsyncClient() - - # Initialize request argument(s) - api_hub_instance = apihub_v1.ApiHubInstance() - api_hub_instance.config.cmek_key_name = "cmek_key_name_value" - - request = apihub_v1.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance=api_hub_instance, - ) - - # Make the request - operation = client.create_api_hub_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END apihub_v1_generated_Provisioning_CreateApiHubInstance_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py deleted file mode 100644 index adeb99e46bf6..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetApiHubInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_Provisioning_GetApiHubInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_api_hub_instance(): - # Create a client - client = apihub_v1.ProvisioningAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetApiHubInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_api_hub_instance(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_Provisioning_GetApiHubInstance_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py deleted file mode 100644 index 6fa9bb31c0d4..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupApiHubInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_Provisioning_LookupApiHubInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lookup_api_hub_instance(): - # Create a client - client = apihub_v1.ProvisioningAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LookupApiHubInstanceRequest( - parent="parent_value", - ) - - # Make the request - response = await client.lookup_api_hub_instance(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_Provisioning_LookupApiHubInstance_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py deleted file mode 100644 index 41874f8f4024..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - runtime_project_attachment = apihub_v1.RuntimeProjectAttachment() - runtime_project_attachment.runtime_project = "runtime_project_value" - - request = apihub_v1.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - runtime_project_attachment=runtime_project_attachment, - ) - - # Make the request - response = await client.create_runtime_project_attachment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py deleted file mode 100644 index c6d606366236..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - await client.delete_runtime_project_attachment(request=request) - - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py deleted file mode 100644 index f174d75570c7..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_runtime_project_attachment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py deleted file mode 100644 index 82f990e50294..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListRuntimeProjectAttachments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_runtime_project_attachments(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_runtime_project_attachments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py deleted file mode 100644 index 342d51e39899..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lookup_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - response = await client.lookup_runtime_project_attachment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json index 26cd3b4e3072..475a2011ac20 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -14,12 +14,11 @@ { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.create_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.create_dependency", "method": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies.CreateDependency", "service": { @@ -62,10 +61,10 @@ "shortName": "create_dependency" }, "description": "Sample for CreateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_async.py", + "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_sync", "segments": [ { "end": 56, @@ -98,7 +97,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py" }, { "canonical": true, @@ -107,30 +106,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.create_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.delete_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.CreateDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "CreateDependency" + "shortName": "DeleteDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDependencyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" + "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" }, { - "name": "dependency_id", + "name": "name", "type": "str" }, { @@ -146,22 +137,21 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "create_dependency" + "shortName": "delete_dependency" }, - "description": "Sample for CreateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py", + "description": "Sample for DeleteDependency", + "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -171,44 +161,41 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.delete_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.get_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "DeleteDependency" + "shortName": "GetDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" + "type": "google.cloud.apihub_v1.types.GetDependencyRequest" }, { "name": "name", @@ -227,21 +214,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_dependency" + "resultType": "google.cloud.apihub_v1.types.Dependency", + "shortName": "get_dependency" }, - "description": "Sample for DeleteDependency", - "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py", + "description": "Sample for GetDependency", + "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -256,15 +244,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py" }, { "canonical": true, @@ -273,22 +263,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", "shortName": "ApiHubDependenciesClient" }, - "fullName": 
"google.cloud.apihub_v1.ApiHubDependenciesClient.delete_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.list_dependencies", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "DeleteDependency" + "shortName": "ListDependencies" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" + "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -304,21 +294,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_dependency" + "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager", + "shortName": "list_dependencies" }, - "description": "Sample for DeleteDependency", - "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py", + "description": "Sample for ListDependencies", + "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_sync", "segments": [ { - "end": 49, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 52, "start": 27, "type": "SHORT" }, @@ -333,41 +324,46 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.get_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.update_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "GetDependency" + "shortName": "UpdateDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetDependencyRequest" + "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" }, { - "name": "name", - "type": "str" + "name": "dependency", + "type": "google.cloud.apihub_v1.types.Dependency" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -383,21 +379,21 @@ } ], "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "get_dependency" + "shortName": "update_dependency" }, - "description": "Sample for GetDependency", - "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_async.py", + "description": "Sample for UpdateDependency", + "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", 
- "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -407,43 +403,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.get_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.disable_plugin", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "GetDependency" + "shortName": "DisablePlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetDependencyRequest" + "type": "google.cloud.apihub_v1.types.DisablePluginRequest" }, { "name": "name", @@ -462,14 +458,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "get_dependency" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "disable_plugin" }, - "description": "Sample for GetDependency", - "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py", + "description": "Sample for DisablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync", "segments": [ { "end": 51, @@ -502,32 +498,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.list_dependencies", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.enable_plugin", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "ListDependencies" + "shortName": "EnablePlugin" }, "parameters": [ { 
"name": "request", - "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" + "type": "google.cloud.apihub_v1.types.EnablePluginRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -543,22 +538,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesAsyncPager", - "shortName": "list_dependencies" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "enable_plugin" }, - "description": "Sample for ListDependencies", - "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py", + "description": "Sample for EnablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_async", + "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -578,36 +573,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py" + "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.list_dependencies", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.get_plugin", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "ListDependencies" + "shortName": "GetPlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" + "type": "google.cloud.apihub_v1.types.GetPluginRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -623,22 +618,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager", - "shortName": "list_dependencies" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "get_plugin" }, - "description": "Sample for ListDependencies", - "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py", + "description": "Sample for GetPlugin", + "file": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_sync", + "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -658,4532 +653,44 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py" + "title": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py" }, { "canonical": true, 
"clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.update_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateDependency" + "shortName": "CreateApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" + "type": "google.cloud.apihub_v1.types.CreateApiRequest" }, { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" + "name": "parent", + "type": "str" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "update_dependency" - }, - "description": "Sample for UpdateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.update_dependency", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" - }, - "shortName": "UpdateDependency" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" - }, - { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "update_dependency" - }, - "description": "Sample for UpdateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.disable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "DisablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DisablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "disable_plugin" - }, - "description": "Sample for DisablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.disable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "DisablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DisablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "disable_plugin" - }, - "description": "Sample for DisablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - 
"type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.enable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "EnablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.EnablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "enable_plugin" - }, - "description": "Sample for EnablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.enable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "EnablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.EnablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "enable_plugin" - }, - "description": "Sample for EnablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.get_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "GetPlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetPluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "get_plugin" - }, - "description": "Sample for GetPlugin", - "file": "apihub_v1_generated_api_hub_plugin_get_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_get_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.get_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "GetPlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetPluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "get_plugin" - }, - "description": "Sample for GetPlugin", - "file": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateApi" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" - }, - { - "name": "api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "create_api" - }, - "description": "Sample for CreateApi", - "file": "apihub_v1_generated_api_hub_create_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateApi_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" - }, - { - "name": "api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "create_api" - }, - "description": "Sample for CreateApi", - "file": "apihub_v1_generated_api_hub_create_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateApi_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" - }, - 
{ - "name": "attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "create_attribute" - }, - "description": "Sample for CreateAttribute", - "file": "apihub_v1_generated_api_hub_create_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" - }, - { - "name": "attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "create_attribute" - }, - "description": "Sample for CreateAttribute", - "file": "apihub_v1_generated_api_hub_create_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" - }, - { - "name": "deployment_id", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "create_deployment" - }, - "description": "Sample for CreateDeployment", - "file": "apihub_v1_generated_api_hub_create_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" - }, - { - "name": "deployment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "create_deployment" - }, - "description": "Sample for CreateDeployment", - "file": "apihub_v1_generated_api_hub_create_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" - }, - { - "name": "external_api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - 
"type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "create_external_api" - }, - "description": "Sample for CreateExternalApi", - "file": "apihub_v1_generated_api_hub_create_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" - }, - { - "name": "external_api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "create_external_api" - }, - "description": "Sample for CreateExternalApi", - "file": "apihub_v1_generated_api_hub_create_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateSpecRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" - }, - { - "name": "spec_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - 
], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "create_spec" - }, - "description": "Sample for CreateSpec", - "file": "apihub_v1_generated_api_hub_create_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateSpecRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" - }, - { - "name": "spec_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "create_spec" - }, - "description": "Sample for CreateSpec", - "file": "apihub_v1_generated_api_hub_create_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateVersionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" - }, - { - "name": "version_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "create_version" - }, - "description": "Sample for CreateVersion", - "file": "apihub_v1_generated_api_hub_create_version_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateVersionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" - }, - { - "name": "version_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "create_version" - }, - "description": "Sample for CreateVersion", - "file": "apihub_v1_generated_api_hub_create_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_api" - }, - "description": "Sample for DeleteApi", - "file": "apihub_v1_generated_api_hub_delete_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_api" - }, - "description": "Sample for DeleteApi", - "file": "apihub_v1_generated_api_hub_delete_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_attribute" - }, - "description": "Sample for DeleteAttribute", - "file": "apihub_v1_generated_api_hub_delete_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - 
"shortName": "DeleteAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_attribute" - }, - "description": "Sample for DeleteAttribute", - "file": "apihub_v1_generated_api_hub_delete_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deployment" - }, - "description": "Sample for DeleteDeployment", - "file": "apihub_v1_generated_api_hub_delete_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deployment" - }, - "description": "Sample for DeleteDeployment", - "file": 
"apihub_v1_generated_api_hub_delete_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_external_api" - }, - "description": "Sample for DeleteExternalApi", - "file": "apihub_v1_generated_api_hub_delete_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_external_api" - }, - "description": "Sample for DeleteExternalApi", - "file": "apihub_v1_generated_api_hub_delete_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { 
- "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_spec" - }, - "description": "Sample for DeleteSpec", - "file": "apihub_v1_generated_api_hub_delete_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_spec" - }, - "description": "Sample for DeleteSpec", - "file": "apihub_v1_generated_api_hub_delete_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteVersion" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.DeleteVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_version" - }, - "description": "Sample for DeleteVersion", - "file": "apihub_v1_generated_api_hub_delete_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_version" - }, - "description": "Sample for DeleteVersion", - "file": "apihub_v1_generated_api_hub_delete_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_api_operation", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApiOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ApiOperation", - "shortName": "get_api_operation" - }, - "description": "Sample for GetApiOperation", - "file": "apihub_v1_generated_api_hub_get_api_operation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_ApiHub_GetApiOperation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_operation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api_operation", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApiOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ApiOperation", - "shortName": "get_api_operation" - }, - "description": "Sample for GetApiOperation", - "file": "apihub_v1_generated_api_hub_get_api_operation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_operation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "get_api" - }, - "description": "Sample for GetApi", - "file": "apihub_v1_generated_api_hub_get_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApi_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "get_api" - }, - "description": "Sample for GetApi", - "file": "apihub_v1_generated_api_hub_get_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApi_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "get_attribute" - }, - "description": "Sample for GetAttribute", - "file": "apihub_v1_generated_api_hub_get_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": 
"GetAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "get_attribute" - }, - "description": "Sample for GetAttribute", - "file": "apihub_v1_generated_api_hub_get_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_definition", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDefinition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Definition", - "shortName": "get_definition" - }, - "description": "Sample for GetDefinition", - "file": "apihub_v1_generated_api_hub_get_definition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_definition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_definition", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDefinition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.apihub_v1.types.Definition", - "shortName": "get_definition" - }, - "description": "Sample for GetDefinition", - "file": "apihub_v1_generated_api_hub_get_definition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_definition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "get_deployment" - }, - "description": "Sample for GetDeployment", - "file": "apihub_v1_generated_api_hub_get_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "get_deployment" - }, - "description": "Sample for GetDeployment", - "file": "apihub_v1_generated_api_hub_get_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "get_external_api" - }, - "description": "Sample for GetExternalApi", - "file": "apihub_v1_generated_api_hub_get_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "get_external_api" - }, - "description": "Sample for GetExternalApi", - "file": "apihub_v1_generated_api_hub_get_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_external_api_sync.py" - }, - 
{ - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_spec_contents", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpecContents" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.SpecContents", - "shortName": "get_spec_contents" - }, - "description": "Sample for GetSpecContents", - "file": "apihub_v1_generated_api_hub_get_spec_contents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_contents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec_contents", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpecContents" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.SpecContents", - "shortName": "get_spec_contents" - }, - "description": "Sample for GetSpecContents", - "file": "apihub_v1_generated_api_hub_get_spec_contents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_contents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", - "service": { - "fullName": 
"google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "get_spec" - }, - "description": "Sample for GetSpec", - "file": "apihub_v1_generated_api_hub_get_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpec_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "get_spec" - }, - "description": "Sample for GetSpec", - "file": "apihub_v1_generated_api_hub_get_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpec_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "get_version" - 
}, - "description": "Sample for GetVersion", - "file": "apihub_v1_generated_api_hub_get_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetVersion_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "get_version" - }, - "description": "Sample for GetVersion", - "file": "apihub_v1_generated_api_hub_get_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetVersion_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_api_operations", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApiOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsAsyncPager", - "shortName": "list_api_operations" - }, - "description": "Sample for ListApiOperations", - "file": "apihub_v1_generated_api_hub_list_api_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_api_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_api_operations", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApiOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager", - "shortName": "list_api_operations" - }, - "description": "Sample for ListApiOperations", - "file": "apihub_v1_generated_api_hub_list_api_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_api_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApisRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisAsyncPager", - "shortName": "list_apis" - }, - "description": "Sample for ListApis", - "file": "apihub_v1_generated_api_hub_list_apis_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApis_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_apis_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApisRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager", - "shortName": "list_apis" - }, - "description": "Sample for ListApis", - "file": "apihub_v1_generated_api_hub_list_apis_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApis_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_apis_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_attributes", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListAttributesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesAsyncPager", - "shortName": "list_attributes" - }, - "description": "Sample for ListAttributes", - "file": "apihub_v1_generated_api_hub_list_attributes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_attributes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_attributes", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - 
"shortName": "ListAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListAttributesRequest" - }, - { - "name": "parent", + "name": "api_id", "type": "str" }, { @@ -5199,22 +706,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager", - "shortName": "list_attributes" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "create_api" }, - "description": "Sample for ListAttributes", - "file": "apihub_v1_generated_api_hub_list_attributes_sync.py", + "description": "Sample for CreateApi", + "file": "apihub_v1_generated_api_hub_create_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateApi_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5224,103 +731,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_attributes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_deployments", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListDeployments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsAsyncPager", - "shortName": "list_deployments" - }, - "description": "Sample for ListDeployments", - "file": "apihub_v1_generated_api_hub_list_deployments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, { "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_deployments_async.py" + "title": "apihub_v1_generated_api_hub_create_api_sync.py" }, { "canonical": true, @@ -5329,183 +755,30 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_deployments", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListDeployments" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.ListDeploymentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager", - "shortName": "list_deployments" - }, - "description": "Sample for ListDeployments", - "file": "apihub_v1_generated_api_hub_list_deployments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_deployments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_external_apis", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_attribute", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListExternalApis" + "shortName": "CreateAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" + "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" }, { "name": "parent", "type": "str" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisAsyncPager", - "shortName": "list_external_apis" - }, - "description": "Sample for ListExternalApis", - "file": "apihub_v1_generated_api_hub_list_external_apis_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_external_apis_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_external_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListExternalApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" + "name": "attribute", + "type": 
"google.cloud.apihub_v1.types.Attribute" }, { - "name": "parent", + "name": "attribute_id", "type": "str" }, { @@ -5521,22 +794,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager", - "shortName": "list_external_apis" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "create_attribute" }, - "description": "Sample for ListExternalApis", - "file": "apihub_v1_generated_api_hub_list_external_apis_sync.py", + "description": "Sample for CreateAttribute", + "file": "apihub_v1_generated_api_hub_create_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5546,49 +819,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_external_apis_sync.py" + "title": "apihub_v1_generated_api_hub_create_attribute_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_specs", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListSpecs" + "shortName": "CreateDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListSpecsRequest" + "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" }, { "name": "parent", "type": "str" }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "deployment_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5602,22 +882,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsAsyncPager", - "shortName": "list_specs" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "create_deployment" }, - "description": "Sample for ListSpecs", - "file": "apihub_v1_generated_api_hub_list_specs_async.py", + "description": "Sample for CreateDeployment", + "file": "apihub_v1_generated_api_hub_create_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_async", + "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -5627,22 +907,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": 
"RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_specs_async.py" + "title": "apihub_v1_generated_api_hub_create_deployment_sync.py" }, { "canonical": true, @@ -5651,24 +931,32 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_specs", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListSpecs" + "shortName": "CreateExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListSpecsRequest" + "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" }, { "name": "parent", "type": "str" }, + { + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" + }, + { + "name": "external_api_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5682,22 +970,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager", - "shortName": "list_specs" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "create_external_api" }, - "description": "Sample for ListSpecs", - "file": "apihub_v1_generated_api_hub_list_specs_sync.py", + "description": "Sample for CreateExternalApi", + "file": "apihub_v1_generated_api_hub_create_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5707,49 +995,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_specs_sync.py" + "title": "apihub_v1_generated_api_hub_create_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_versions", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListVersions" + "shortName": "CreateSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListVersionsRequest" + "type": "google.cloud.apihub_v1.types.CreateSpecRequest" }, { "name": "parent", "type": "str" }, + { + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" + }, + { + "name": "spec_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5763,22 +1058,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsAsyncPager", - "shortName": 
"list_versions" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "create_spec" }, - "description": "Sample for ListVersions", - "file": "apihub_v1_generated_api_hub_list_versions_async.py", + "description": "Sample for CreateSpec", + "file": "apihub_v1_generated_api_hub_create_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListVersions_async", + "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5788,22 +1083,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_versions_async.py" + "title": "apihub_v1_generated_api_hub_create_spec_sync.py" }, { "canonical": true, @@ -5812,24 +1107,32 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_versions", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_version", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListVersions" + "shortName": "CreateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListVersionsRequest" + "type": "google.cloud.apihub_v1.types.CreateVersionRequest" }, { "name": "parent", "type": "str" }, + { + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" + }, + { + "name": "version_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5843,22 +1146,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager", - "shortName": "list_versions" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "create_version" }, - "description": "Sample for ListVersions", - "file": "apihub_v1_generated_api_hub_list_versions_sync.py", + "description": "Sample for CreateVersion", + "file": "apihub_v1_generated_api_hub_create_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListVersions_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5868,51 +1171,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_versions_sync.py" + "title": "apihub_v1_generated_api_hub_create_version_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.search_resources", 
+ "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "SearchResources" + "shortName": "DeleteApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" - }, - { - "name": "location", - "type": "str" + "type": "google.cloud.apihub_v1.types.DeleteApiRequest" }, { - "name": "query", + "name": "name", "type": "str" }, { @@ -5928,22 +1226,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesAsyncPager", - "shortName": "search_resources" + "shortName": "delete_api" }, - "description": "Sample for SearchResources", - "file": "apihub_v1_generated_api_hub_search_resources_async.py", + "description": "Sample for DeleteApi", + "file": "apihub_v1_generated_api_hub_delete_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_SearchResources_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -5953,22 +1250,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_search_resources_async.py" + "title": "apihub_v1_generated_api_hub_delete_api_sync.py" }, { "canonical": true, @@ -5977,26 +1272,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.search_resources", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_attribute", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "SearchResources" + "shortName": "DeleteAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" - }, - { - "name": "location", - "type": "str" + "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" }, { - "name": "query", + "name": "name", "type": "str" }, { @@ -6012,22 +1303,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager", - "shortName": "search_resources" + "shortName": "delete_attribute" }, - "description": "Sample for SearchResources", - "file": "apihub_v1_generated_api_hub_search_resources_sync.py", + "description": "Sample for DeleteAttribute", + "file": "apihub_v1_generated_api_hub_delete_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_SearchResources_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6037,52 +1327,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": 
"REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_search_resources_sync.py" + "title": "apihub_v1_generated_api_hub_delete_attribute_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateApi" + "shortName": "DeleteDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateApiRequest" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" + "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6097,22 +1380,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "update_api" + "shortName": "delete_deployment" }, - "description": "Sample for UpdateApi", - "file": "apihub_v1_generated_api_hub_update_api_async.py", + "description": "Sample for DeleteDeployment", + "file": "apihub_v1_generated_api_hub_delete_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_sync", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6122,22 +1404,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_api_async.py" + "title": "apihub_v1_generated_api_hub_delete_deployment_sync.py" }, { "canonical": true, @@ -6146,27 +1426,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateApi" + "shortName": "DeleteExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateApiRequest" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" + "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6181,22 +1457,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "update_api" + "shortName": "delete_external_api" }, - "description": "Sample for UpdateApi", - "file": 
"apihub_v1_generated_api_hub_update_api_sync.py", + "description": "Sample for DeleteExternalApi", + "file": "apihub_v1_generated_api_hub_delete_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_sync", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6206,52 +1481,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_api_sync.py" + "title": "apihub_v1_generated_api_hub_delete_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_attribute", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateAttribute" + "shortName": "DeleteSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" + "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6266,22 +1534,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "update_attribute" + "shortName": "delete_spec" }, - "description": "Sample for UpdateAttribute", - "file": "apihub_v1_generated_api_hub_update_attribute_async.py", + "description": "Sample for DeleteSpec", + "file": "apihub_v1_generated_api_hub_delete_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6291,22 +1558,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_attribute_async.py" + "title": "apihub_v1_generated_api_hub_delete_spec_sync.py" }, { "canonical": true, @@ -6315,27 +1580,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_attribute", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_version", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": 
"ApiHub" }, - "shortName": "UpdateAttribute" + "shortName": "DeleteVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" + "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6350,22 +1611,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "update_attribute" + "shortName": "delete_version" }, - "description": "Sample for UpdateAttribute", - "file": "apihub_v1_generated_api_hub_update_attribute_sync.py", + "description": "Sample for DeleteVersion", + "file": "apihub_v1_generated_api_hub_delete_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6375,52 +1635,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_attribute_sync.py" + "title": "apihub_v1_generated_api_hub_delete_version_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_deployment", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api_operation", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", + "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateDeployment" + "shortName": "GetApiOperation" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" + "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6435,22 +1688,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "update_deployment" + "resultType": "google.cloud.apihub_v1.types.ApiOperation", + "shortName": "get_api_operation" }, - "description": "Sample for UpdateDeployment", - "file": "apihub_v1_generated_api_hub_update_deployment_async.py", + "description": "Sample for GetApiOperation", + "file": "apihub_v1_generated_api_hub_get_api_operation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_async", + "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_sync", "segments": [ { - "end": 58, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6460,22 +1713,22 @@ "type": "CLIENT_INITIALIZATION" }, { - 
"end": 52, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_deployment_async.py" + "title": "apihub_v1_generated_api_hub_get_api_operation_sync.py" }, { "canonical": true, @@ -6484,27 +1737,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_deployment", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", + "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateDeployment" + "shortName": "GetApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" + "type": "google.cloud.apihub_v1.types.GetApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6519,22 +1768,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "update_deployment" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "get_api" }, - "description": "Sample for UpdateDeployment", - "file": "apihub_v1_generated_api_hub_update_deployment_sync.py", + "description": "Sample for GetApi", + "file": "apihub_v1_generated_api_hub_get_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetApi_sync", "segments": [ { - "end": 58, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6544,52 +1793,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_deployment_sync.py" + "title": "apihub_v1_generated_api_hub_get_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_external_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_attribute", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", + "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateExternalApi" + "shortName": "GetAttribute" }, "parameters": [ { - "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" + "name": "request", + "type": "google.cloud.apihub_v1.types.GetAttributeRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": 
"name", + "type": "str" }, { "name": "retry", @@ -6604,22 +1848,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "update_external_api" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "get_attribute" }, - "description": "Sample for UpdateExternalApi", - "file": "apihub_v1_generated_api_hub_update_external_api_async.py", + "description": "Sample for GetAttribute", + "file": "apihub_v1_generated_api_hub_get_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_async", + "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6629,22 +1873,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_external_api_async.py" + "title": "apihub_v1_generated_api_hub_get_attribute_sync.py" }, { "canonical": true, @@ -6653,27 +1897,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_external_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_definition", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", + "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateExternalApi" + "shortName": "GetDefinition" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" + "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6688,22 +1928,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "update_external_api" + "resultType": "google.cloud.apihub_v1.types.Definition", + "shortName": "get_definition" }, - "description": "Sample for UpdateExternalApi", - "file": "apihub_v1_generated_api_hub_update_external_api_sync.py", + "description": "Sample for GetDefinition", + "file": "apihub_v1_generated_api_hub_get_definition_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6713,52 +1953,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_external_api_sync.py" + "title": "apihub_v1_generated_api_hub_get_definition_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, 
"client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateSpec" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" + "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6773,22 +2008,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "update_spec" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for UpdateSpec", - "file": "apihub_v1_generated_api_hub_update_spec_async.py", + "description": "Sample for GetDeployment", + "file": "apihub_v1_generated_api_hub_get_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_async", + "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6798,22 +2033,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_spec_async.py" + "title": "apihub_v1_generated_api_hub_get_deployment_sync.py" }, { "canonical": true, @@ -6822,27 +2057,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateSpec" + "shortName": "GetExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" + "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6857,22 +2088,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "update_spec" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "get_external_api" }, - "description": "Sample for UpdateSpec", - "file": "apihub_v1_generated_api_hub_update_spec_sync.py", + "description": "Sample for GetExternalApi", + "file": "apihub_v1_generated_api_hub_get_external_api_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6882,52 +2113,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_spec_sync.py" + "title": "apihub_v1_generated_api_hub_get_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_version", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec_contents", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateVersion" + "shortName": "GetSpecContents" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" + "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6942,22 +2168,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "update_version" + "resultType": "google.cloud.apihub_v1.types.SpecContents", + "shortName": "get_spec_contents" }, - "description": "Sample for UpdateVersion", - "file": "apihub_v1_generated_api_hub_update_version_async.py", + "description": "Sample for GetSpecContents", + "file": "apihub_v1_generated_api_hub_get_spec_contents_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_async", + "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6967,22 +2193,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_version_async.py" + "title": "apihub_v1_generated_api_hub_get_spec_contents_sync.py" }, { "canonical": true, @@ -6991,27 +2217,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_version", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateVersion" + 
"shortName": "GetSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" + "type": "google.cloud.apihub_v1.types.GetSpecRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -7026,22 +2248,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "update_version" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "get_spec" }, - "description": "Sample for UpdateVersion", - "file": "apihub_v1_generated_api_hub_update_version_sync.py", + "description": "Sample for GetSpec", + "file": "apihub_v1_generated_api_hub_get_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetSpec_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7051,55 +2273,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_version_sync.py" + "title": "apihub_v1_generated_api_hub_get_spec_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.create_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_version", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateHostProjectRegistration" + "shortName": "GetVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "host_project_registration", - "type": "google.cloud.apihub_v1.types.HostProjectRegistration" + "type": "google.cloud.apihub_v1.types.GetVersionRequest" }, { - "name": "host_project_registration_id", + "name": "name", "type": "str" }, { @@ -7115,22 +2328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "create_host_project_registration" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "get_version" }, - "description": "Sample for CreateHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py", + "description": "Sample for GetVersion", + "file": "apihub_v1_generated_api_hub_get_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async", + "regionTag": "apihub_v1_generated_ApiHub_GetVersion_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7140,56 +2353,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py" + "title": "apihub_v1_generated_api_hub_get_version_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.create_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_api_operations", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateHostProjectRegistration" + "shortName": "ListApiOperations" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "host_project_registration", - "type": "google.cloud.apihub_v1.types.HostProjectRegistration" - }, - { - "name": "host_project_registration_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -7203,22 +2408,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "create_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager", + "shortName": "list_api_operations" }, - "description": "Sample for CreateHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py", + "description": "Sample for ListApiOperations", + "file": "apihub_v1_generated_api_hub_list_api_operations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7228,47 +2433,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py" + "title": 
"apihub_v1_generated_api_hub_list_api_operations_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.get_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_apis", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetHostProjectRegistration" + "shortName": "ListApis" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListApisRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7284,22 +2488,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "get_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager", + "shortName": "list_apis" }, - "description": "Sample for GetHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py", + "description": "Sample for ListApis", + "file": "apihub_v1_generated_api_hub_list_apis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_async", + "regionTag": "apihub_v1_generated_ApiHub_ListApis_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7319,36 +2523,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py" + "title": "apihub_v1_generated_api_hub_list_apis_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.get_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_attributes", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetHostProjectRegistration" + "shortName": "ListAttributes" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListAttributesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7364,22 
+2568,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "get_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager", + "shortName": "list_attributes" }, - "description": "Sample for GetHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py", + "description": "Sample for ListAttributes", + "file": "apihub_v1_generated_api_hub_list_attributes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7399,34 +2603,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py" + "title": "apihub_v1_generated_api_hub_list_attributes_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.list_host_project_registrations", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_deployments", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", + "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "ListHostProjectRegistrations" + "shortName": "ListDeployments" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" + "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" }, { "name": "parent", @@ -7445,14 +2648,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsAsyncPager", - "shortName": "list_host_project_registrations" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager", + "shortName": "list_deployments" }, - "description": "Sample for ListHostProjectRegistrations", - "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py", + "description": "Sample for ListDeployments", + "file": "apihub_v1_generated_api_hub_list_deployments_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async", + "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_sync", "segments": [ { "end": 52, @@ -7485,28 +2688,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py" + "title": "apihub_v1_generated_api_hub_list_deployments_sync.py" }, { "canonical": true, "clientMethod": { "client": { - 
"fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.list_host_project_registrations", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_external_apis", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", + "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "ListHostProjectRegistrations" + "shortName": "ListExternalApis" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" + "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" }, { "name": "parent", @@ -7525,14 +2728,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager", - "shortName": "list_host_project_registrations" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager", + "shortName": "list_external_apis" }, - "description": "Sample for ListHostProjectRegistrations", - "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py", + "description": "Sample for ListExternalApis", + "file": "apihub_v1_generated_api_hub_list_external_apis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_sync", "segments": [ { "end": 52, @@ -7565,32 +2768,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py" + "title": "apihub_v1_generated_api_hub_list_external_apis_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.get_style_guide_contents", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_specs", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", + "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuideContents" + "shortName": "ListSpecs" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" + "type": "google.cloud.apihub_v1.types.ListSpecsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7606,22 +2808,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", - "shortName": "get_style_guide_contents" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager", + "shortName": "list_specs" }, - "description": "Sample for 
GetStyleGuideContents", - "file": "apihub_v1_generated_linting_service_get_style_guide_contents_async.py", + "description": "Sample for ListSpecs", + "file": "apihub_v1_generated_api_hub_list_specs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_async", + "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7641,36 +2843,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_contents_async.py" + "title": "apihub_v1_generated_api_hub_list_specs_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide_contents", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_versions", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", + "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuideContents" + "shortName": "ListVersions" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" + "type": "google.cloud.apihub_v1.types.ListVersionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7686,22 +2888,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", - "shortName": "get_style_guide_contents" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager", + "shortName": "list_versions" }, - "description": "Sample for GetStyleGuideContents", - "file": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py", + "description": "Sample for ListVersions", + "file": "apihub_v1_generated_api_hub_list_versions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListVersions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7721,37 +2923,40 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py" + "title": "apihub_v1_generated_api_hub_list_versions_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.get_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.search_resources", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", "service": { - "fullName": 
"google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuide" + "shortName": "SearchResources" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" }, { - "name": "name", + "name": "location", + "type": "str" + }, + { + "name": "query", "type": "str" }, { @@ -7767,22 +2972,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "get_style_guide" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager", + "shortName": "search_resources" }, - "description": "Sample for GetStyleGuide", - "file": "apihub_v1_generated_linting_service_get_style_guide_async.py", + "description": "Sample for SearchResources", + "file": "apihub_v1_generated_api_hub_search_resources_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_async", + "regionTag": "apihub_v1_generated_ApiHub_SearchResources_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -7792,47 +2997,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_async.py" + "title": "apihub_v1_generated_api_hub_search_resources_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_api", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuide" + "shortName": "UpdateApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateApiRequest" }, { - "name": "name", - "type": "str" + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7847,22 +3056,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "get_style_guide" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "update_api" }, - "description": "Sample for GetStyleGuide", - "file": "apihub_v1_generated_linting_service_get_style_guide_sync.py", + "description": "Sample for UpdateApi", + "file": "apihub_v1_generated_api_hub_update_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_sync", "segments": [ { - 
"end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -7872,44 +3081,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_sync.py" + "title": "apihub_v1_generated_api_hub_update_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.lint_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_attribute", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "LintSpec" + "shortName": "UpdateAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LintSpecRequest" + "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" + }, + { + "name": "attribute", + "type": "google.cloud.apihub_v1.types.Attribute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7924,21 +3140,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "lint_spec" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "update_attribute" }, - "description": "Sample for LintSpec", - "file": "apihub_v1_generated_linting_service_lint_spec_async.py", + "description": "Sample for UpdateAttribute", + "file": "apihub_v1_generated_api_hub_update_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_LintSpec_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, @@ -7948,41 +3165,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_lint_spec_async.py" + "title": "apihub_v1_generated_api_hub_update_attribute_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.lint_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_deployment", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - 
"shortName": "LintSpec" + "shortName": "UpdateDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LintSpecRequest" + "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" + }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7997,21 +3224,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "lint_spec" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "update_deployment" }, - "description": "Sample for LintSpec", - "file": "apihub_v1_generated_linting_service_lint_spec_sync.py", + "description": "Sample for UpdateDeployment", + "file": "apihub_v1_generated_api_hub_update_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_LintSpec_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_sync", "segments": [ { - "end": 49, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 58, "start": 27, "type": "SHORT" }, @@ -8021,46 +3249,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_lint_spec_sync.py" + "title": "apihub_v1_generated_api_hub_update_deployment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.update_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_external_api", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateStyleGuide" + "shortName": "UpdateExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" }, { - "name": "style_guide", - "type": "google.cloud.apihub_v1.types.StyleGuide" + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" }, { "name": "update_mask", @@ -8079,22 +3308,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "update_style_guide" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "update_external_api" }, - "description": "Sample for UpdateStyleGuide", - "file": "apihub_v1_generated_linting_service_update_style_guide_async.py", + "description": "Sample for UpdateExternalApi", + "file": "apihub_v1_generated_api_hub_update_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_sync", "segments": [ { - "end": 56, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 54, "start": 27, 
"type": "SHORT" }, @@ -8104,47 +3333,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_update_style_guide_async.py" + "title": "apihub_v1_generated_api_hub_update_external_api_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.update_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_spec", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateStyleGuide" + "shortName": "UpdateSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" }, { - "name": "style_guide", - "type": "google.cloud.apihub_v1.types.StyleGuide" + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" }, { "name": "update_mask", @@ -8163,14 +3392,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "update_style_guide" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "update_spec" }, - "description": "Sample for UpdateStyleGuide", - "file": "apihub_v1_generated_linting_service_update_style_guide_sync.py", + "description": "Sample for UpdateSpec", + "file": "apihub_v1_generated_api_hub_update_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_sync", "segments": [ { "end": 56, @@ -8203,41 +3432,36 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_update_style_guide_sync.py" + "title": "apihub_v1_generated_api_hub_update_spec_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.create_api_hub_instance", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_version", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateApiHubInstance" + "shortName": "UpdateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" }, { - "name": "api_hub_instance", - "type": 
"google.cloud.apihub_v1.types.ApiHubInstance" + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" }, { - "name": "api_hub_instance_id", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -8252,22 +3476,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "update_version" }, - "description": "Sample for CreateApiHubInstance", - "file": "apihub_v1_generated_provisioning_create_api_hub_instance_async.py", + "description": "Sample for UpdateVersion", + "file": "apihub_v1_generated_api_hub_update_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_sync", "segments": [ { - "end": 59, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 54, "start": 27, "type": "SHORT" }, @@ -8277,54 +3501,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_create_api_hub_instance_async.py" + "title": "apihub_v1_generated_api_hub_update_version_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.create_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.create_host_project_registration", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "CreateApiHubInstance" + "shortName": "CreateHostProjectRegistration" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" }, { "name": "parent", "type": "str" }, { - "name": "api_hub_instance", - "type": "google.cloud.apihub_v1.types.ApiHubInstance" + "name": "host_project_registration", + "type": "google.cloud.apihub_v1.types.HostProjectRegistration" }, { - "name": "api_hub_instance_id", + "name": "host_project_registration_id", "type": "str" }, { @@ -8340,22 +3564,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "create_host_project_registration" }, - "description": "Sample for CreateApiHubInstance", - "file": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py", + "description": "Sample for CreateHostProjectRegistration", + "file": 
"apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_sync", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync", "segments": [ { - "end": 59, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 56, "start": 27, "type": "SHORT" }, @@ -8365,44 +3589,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py" + "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.get_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.get_host_project_registration", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "GetApiHubInstance" + "shortName": "GetHostProjectRegistration" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" }, { "name": "name", @@ -8421,14 +3644,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", - "shortName": "get_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "get_host_project_registration" }, - "description": "Sample for GetApiHubInstance", - "file": "apihub_v1_generated_provisioning_get_api_hub_instance_async.py", + "description": "Sample for GetHostProjectRegistration", + "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_async", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync", "segments": [ { "end": 51, @@ -8461,31 +3684,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_get_api_hub_instance_async.py" + "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": 
"google.cloud.apihub_v1.ProvisioningClient.get_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.list_host_project_registrations", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "GetApiHubInstance" + "shortName": "ListHostProjectRegistrations" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -8501,22 +3724,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", - "shortName": "get_api_hub_instance" + "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager", + "shortName": "list_host_project_registrations" }, - "description": "Sample for GetApiHubInstance", - "file": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py", + "description": "Sample for ListHostProjectRegistrations", + "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_sync", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -8536,37 +3759,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py" + "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.lookup_api_hub_instance", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide_contents", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "LookupApiHubInstance" + "shortName": "GetStyleGuideContents" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -8582,14 +3804,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", - "shortName": "lookup_api_hub_instance" + "resultType": 
"google.cloud.apihub_v1.types.StyleGuideContents", + "shortName": "get_style_guide_contents" }, - "description": "Sample for LookupApiHubInstance", - "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py", + "description": "Sample for GetStyleGuideContents", + "file": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_async", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_sync", "segments": [ { "end": 51, @@ -8622,31 +3844,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py" + "title": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.lookup_api_hub_instance", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "LookupApiHubInstance" + "shortName": "GetStyleGuide" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -8662,14 +3884,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", - "shortName": "lookup_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "get_style_guide" }, - "description": "Sample for LookupApiHubInstance", - "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py", + "description": "Sample for GetStyleGuide", + "file": "apihub_v1_generated_linting_service_get_style_guide_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_sync", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_sync", "segments": [ { "end": 51, @@ -8702,41 +3924,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py" + "title": "apihub_v1_generated_linting_service_get_style_guide_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.create_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.lint_spec", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", "service": { - "fullName": 
"google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "CreateRuntimeProjectAttachment" + "shortName": "LintSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "runtime_project_attachment", - "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" - }, - { - "name": "runtime_project_attachment_id", - "type": "str" + "type": "google.cloud.apihub_v1.types.LintSpecRequest" }, { "name": "retry", @@ -8751,22 +3960,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "create_runtime_project_attachment" + "shortName": "lint_spec" }, - "description": "Sample for CreateRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py", + "description": "Sample for LintSpec", + "file": "apihub_v1_generated_linting_service_lint_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_LintingService_LintSpec_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -8776,55 +3984,49 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_linting_service_lint_spec_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", - "shortName": "RuntimeProjectAttachmentServiceClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.create_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.update_style_guide", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "CreateRuntimeProjectAttachment" + "shortName": "UpdateStyleGuide" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" }, { - "name": "runtime_project_attachment", - "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" + "name": "style_guide", + "type": "google.cloud.apihub_v1.types.StyleGuide" }, { - "name": "runtime_project_attachment_id", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, 
{ "name": "retry", @@ -8839,14 +4041,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "create_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "update_style_guide" }, - "description": "Sample for CreateRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py", + "description": "Sample for UpdateStyleGuide", + "file": "apihub_v1_generated_linting_service_update_style_guide_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_sync", "segments": [ { "end": 56, @@ -8879,32 +4081,39 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_linting_service_update_style_guide_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.delete_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.create_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "DeleteRuntimeProjectAttachment" + "shortName": "CreateApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "api_hub_instance", + "type": "google.cloud.apihub_v1.types.ApiHubInstance" + }, + { + "name": "api_hub_instance_id", "type": "str" }, { @@ -8920,21 +4129,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_runtime_project_attachment" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_api_hub_instance" }, - "description": "Sample for DeleteRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py", + "description": "Sample for CreateApiHubInstance", + "file": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_sync", "segments": [ { - "end": 49, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 59, "start": 27, "type": "SHORT" }, @@ -8944,41 +4154,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + 
"end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", - "shortName": "RuntimeProjectAttachmentServiceClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.delete_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.get_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "DeleteRuntimeProjectAttachment" + "shortName": "GetApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" }, { "name": "name", @@ -8997,21 +4209,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", + "shortName": "get_api_hub_instance" }, - "description": "Sample for DeleteRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py", + "description": "Sample for GetApiHubInstance", + "file": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -9026,40 +4239,41 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.get_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.lookup_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": 
"RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "GetRuntimeProjectAttachment" + "shortName": "LookupApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -9075,14 +4289,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "get_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", + "shortName": "lookup_api_hub_instance" }, - "description": "Sample for GetRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py", + "description": "Sample for LookupApiHubInstance", + "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_sync", "segments": [ { "end": 51, @@ -9115,7 +4329,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py" }, { "canonical": true, @@ -9124,22 +4338,30 @@ "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.get_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.create_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "GetRuntimeProjectAttachment" + "shortName": "CreateRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "runtime_project_attachment", + "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" + }, + { + "name": "runtime_project_attachment_id", "type": "str" }, { @@ -9156,21 +4378,21 @@ } ], "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "get_runtime_project_attachment" + "shortName": "create_runtime_project_attachment" }, - "description": "Sample for GetRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py", + "description": "Sample for CreateRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync", + "regionTag": 
"apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync", "segments": [ { - "end": 51, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 56, "start": 27, "type": "SHORT" }, @@ -9180,47 +4402,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.list_runtime_project_attachments", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.delete_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "ListRuntimeProjectAttachments" + "shortName": "DeleteRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" + "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -9236,22 +4457,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsAsyncPager", - "shortName": "list_runtime_project_attachments" + "shortName": "delete_runtime_project_attachment" }, - "description": "Sample for ListRuntimeProjectAttachments", - "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py", + "description": "Sample for DeleteRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -9266,17 +4486,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py" }, { "canonical": true, @@ -9285,22 +4503,22 @@ "fullName": 
"google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.list_runtime_project_attachments", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.get_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "ListRuntimeProjectAttachments" + "shortName": "GetRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" + "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -9316,22 +4534,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager", - "shortName": "list_runtime_project_attachments" + "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", + "shortName": "get_runtime_project_attachment" }, - "description": "Sample for ListRuntimeProjectAttachments", - "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py", + "description": "Sample for GetRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -9351,37 +4569,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.lookup_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.list_runtime_project_attachments", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "LookupRuntimeProjectAttachment" + "shortName": "ListRuntimeProjectAttachments" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -9397,22 +4614,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentResponse", - "shortName": "lookup_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager", + "shortName": "list_runtime_project_attachments" }, - "description": "Sample for LookupRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py", + "description": "Sample for ListRuntimeProjectAttachments", + "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -9432,12 +4649,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py index aaec1e3055f6..5f7c71ab5ffb 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py @@ -47,12 +47,7 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.cloud.apihub_v1.services.api_hub import ( - ApiHubAsyncClient, - ApiHubClient, - pagers, - transports, -) +from google.cloud.apihub_v1.services.api_hub import ApiHubClient, pagers, transports from google.cloud.apihub_v1.types import apihub_service, common_fields @@ -180,11 +175,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -266,7 +256,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -346,8 +335,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubClient, "grpc"), - (ApiHubAsyncClient, "grpc_asyncio"), (ApiHubClient, "rest"), ], ) @@ -372,8 +359,6 @@ def test_api_hub_client_from_service_account_info(client_class, transport_name): @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubGrpcTransport, "grpc"), - 
(transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubRestTransport, "rest"), ], ) @@ -396,8 +381,6 @@ def test_api_hub_client_service_account_always_use_jwt(transport_class, transpor @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubClient, "grpc"), - (ApiHubAsyncClient, "grpc_asyncio"), (ApiHubClient, "rest"), ], ) @@ -429,20 +412,17 @@ def test_api_hub_client_from_service_account_file(client_class, transport_name): def test_api_hub_client_get_transport_class(): transport = ApiHubClient.get_transport_class() available_transports = [ - transports.ApiHubGrpcTransport, transports.ApiHubRestTransport, ] assert transport in available_transports - transport = ApiHubClient.get_transport_class("grpc") - assert transport == transports.ApiHubGrpcTransport + transport = ApiHubClient.get_transport_class("rest") + assert transport == transports.ApiHubRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -451,11 +431,6 @@ def test_api_hub_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test_api_hub_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object(ApiHubClient, "get_transport_class") as gtc: @@ -587,20 +562,6 @@ def test_api_hub_client_client_options(client_class, transport_class, transport_ @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", "true"), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", "false"), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ApiHubClient, transports.ApiHubRestTransport, "rest", "true"), (ApiHubClient, transports.ApiHubRestTransport, "rest", "false"), ], @@ -610,11 +571,6 @@ def test_api_hub_client_client_options(client_class, transport_class, transport_ "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -718,13 +674,10 @@ def test_api_hub_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ApiHubClient, ApiHubAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubClient]) @mock.patch.object( ApiHubClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubClient) ) -@mock.patch.object( - ApiHubAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubAsyncClient) -) def test_api_hub_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -816,17 +769,12 @@ def test_api_hub_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ApiHubClient, ApiHubAsyncClient]) +@pytest.mark.parametrize("client_class", 
[ApiHubClient]) @mock.patch.object( ApiHubClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test_api_hub_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -903,8 +851,6 @@ def test_api_hub_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -936,13 +882,6 @@ def test_api_hub_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", grpc_helpers), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (ApiHubClient, transports.ApiHubRestTransport, "rest", None), ], ) @@ -970,89 +909,6 @@ def test_api_hub_client_client_options_credentials_file( ) -def test_api_hub_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", grpc_helpers), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1060,33 +916,135 @@ def test_api_hub_client_create_channel_credentials_file( dict, ], ) -def test_create_api(request_type, transport: str = "grpc"): +def test_create_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del request_init["api"][field][i][subfield] + else: + del request_init["api"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.create_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Api) @@ -1097,62 +1055,13 @@ def test_create_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_create_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest() - - -def test_create_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateApiRequest( - parent="parent_value", - api_id="api_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest( - parent="parent_value", - api_id="api_id_value", - ) - - -def test_create_api_use_cached_wrapped_rpc(): +def test_create_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1168,6 +1077,7 @@ def test_create_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_api] = mock_rpc + request = {} client.create_api(request) @@ -1181,272 +1091,228 @@ def test_create_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.create_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest() - +def test_create_api_rest_required_fields(request_type=apihub_service.CreateApiRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.create_api - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_api - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.create_api(request) + jsonified_request["parent"] = "parent_value" - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("api_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.create_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api(request) -@pytest.mark.asyncio -async def test_create_api_async_from_dict(): - await test_create_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateApiRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value = common_fields.Api() - client.create_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiId",)) + & set( + ( + "parent", + "api", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateApiRequest.pb( + apihub_service.CreateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateApiRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.create_api(request) + request = apihub_service.CreateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_api_flattened(): +def test_create_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_api( - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].api_id - mock_val = "api_id_value" - assert arg == mock_val - - -def test_create_api_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_api( - apihub_service.CreateApiRequest(), - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api(request) -@pytest.mark.asyncio -async def test_create_api_flattened_async(): - client = ApiHubAsyncClient( +def test_create_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_api( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", api=common_fields.Api(name="name_value"), api_id="api_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].api_id - mock_val = "api_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_api_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) + + +def test_create_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_api( + client.create_api( apihub_service.CreateApiRequest(), parent="parent_value", api=common_fields.Api(name="name_value"), @@ -1454,6 +1320,12 @@ async def test_create_api_flattened_error_async(): ) +def test_create_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1461,33 +1333,37 @@ async def test_create_api_flattened_error_async(): dict, ], ) -def test_get_api(request_type, transport: str = "grpc"): +def test_get_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.get_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Api) @@ -1498,60 +1374,13 @@ def test_get_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_get_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest() - - -def test_get_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest( - name="name_value", - ) - - -def test_get_api_use_cached_wrapped_rpc(): +def test_get_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1567,6 +1396,7 @@ def test_get_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_api] = mock_rpc + request = {} client.get_api(request) @@ -1580,259 +1410,224 @@ def test_get_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.get_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest() - - -@pytest.mark.asyncio -async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_get_api_rest_required_fields(request_type=apihub_service.GetApiRequest): + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.get_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api(request) -@pytest.mark.asyncio -async def test_get_api_async_from_dict(): - await test_get_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value = common_fields.Api() - client.get_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.get_api(request) + request = apihub_service.GetApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_get_api_flattened(): +def test_get_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_api( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api(request) -def test_get_api_flattened_error(): +def test_get_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api( - apihub_service.GetApiRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_api( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_api( + client.get_api( apihub_service.GetApiRequest(), name="name_value", ) +def test_get_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1840,93 +1635,46 @@ async def test_get_api_flattened_error_async(): dict, ], ) -def test_list_apis(request_type, transport: str = "grpc"): +def test_list_apis_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse( next_page_token="next_page_token_value", ) - response = client.list_apis(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApisRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_apis(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListApisPager) assert response.next_page_token == "next_page_token_value" -def test_list_apis_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest() - - -def test_list_apis_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListApisRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_apis(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_apis_use_cached_wrapped_rpc(): +def test_list_apis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1942,6 +1690,7 @@ def test_list_apis_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_apis] = mock_rpc + request = {} client.list_apis(request) @@ -1955,262 +1704,250 @@ def test_list_apis_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_apis_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_apis_rest_required_fields(request_type=apihub_service.ListApisRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_apis - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_apis - ] = mock_rpc - - request = {} - await client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_apis_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListApisRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_apis(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApisRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListApisAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_apis(request) -@pytest.mark.asyncio -async def test_list_apis_async_from_dict(): - await test_list_apis_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_apis_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value = apihub_service.ListApisResponse() - client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_apis_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApisResponse.to_json( apihub_service.ListApisResponse() ) - await client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_apis_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApisResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_apis( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_apis_flattened_error(): +def test_list_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApisRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_apis( - apihub_service.ListApisRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_apis(request) -@pytest.mark.asyncio -async def test_list_apis_flattened_async(): - client = ApiHubAsyncClient( +def test_list_apis_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_apis( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_apis(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_apis_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_apis( + client.list_apis( apihub_service.ListApisRequest(), parent="parent_value", ) -def test_list_apis_pager(transport_name: str = "grpc"): +def test_list_apis_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListApisResponse( apis=[ common_fields.Api(), common_fields.Api(), @@ -2234,162 +1971,27 @@ def test_list_apis_pager(transport_name: str = "grpc"): common_fields.Api(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_apis(request={}, retry=retry, timeout=timeout) + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListApisResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_apis(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Api) for i in results) - -def test_list_apis_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - pages = list(client.list_apis(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_apis_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_apis), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_apis( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Api) for i in responses) - - -@pytest.mark.asyncio -async def test_list_apis_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_apis), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_apis(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_apis(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2401,33 +2003,135 @@ async def test_list_apis_async_pages(): dict, ], ) -def test_update_api(request_type, transport: str = "grpc"): +def test_update_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request_init["api"] = { + "name": "projects/sample1/locations/sample2/apis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api( + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del request_init["api"][field][i][subfield] + else: + del request_init["api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.update_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_api(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Api) @@ -2438,61 +2142,18 @@ def test_update_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_update_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. +def test_update_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client.update_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - -def test_update_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateApiRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - - -def test_update_api_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.update_api in client._transport._wrapped_methods @@ -2503,6 +2164,7 @@ def test_update_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_api] = mock_rpc + request = {} client.update_api(request) @@ -2516,210 +2178,218 @@ def test_update_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.update_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - +def test_update_api_rest_required_fields(request_type=apihub_service.UpdateApiRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.update_api - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_api - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.update_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with non-default values are left alone - await client.update_api(request) + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.asyncio -async def test_update_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateApiRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.update_api(request) + response = client.update_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateApiRequest() - assert args[0] == request + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" +def test_update_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -@pytest.mark.asyncio -async def test_update_api_async_from_dict(): - await test_update_api_async(request_type=dict) + unset_fields = transport.update_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "api", + "updateMask", + ) + ) + ) -def test_update_api_field_headers(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateApiRequest.pb( + apihub_service.UpdateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateApiRequest() - - request.api.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value = common_fields.Api() - client.update_api(request) + request = apihub_service.UpdateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.update_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "api.name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_update_api_field_headers_async(): - client = ApiHubAsyncClient( +def test_update_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateApiRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.UpdateApiRequest() - - request.api.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.update_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "api.name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_api(request) -def test_update_api_flattened(): +def test_update_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = { + "api": {"name": "projects/sample1/locations/sample2/apis/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( api=common_fields.Api(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{api.name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -def test_update_api_flattened_error(): +def test_update_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2732,140 +2402,54 @@ def test_update_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_api( - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_api_flattened_error_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteApiRequest, + dict, + ], +) +def test_delete_api_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_api( - apihub_service.UpdateApiRequest(), - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteApiRequest, - dict, - ], -) -def test_delete_api(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.delete_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteApiRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert response is None -def test_delete_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest() - - -def test_delete_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest( - name="name_value", - ) - - -def test_delete_api_use_cached_wrapped_rpc(): +def test_delete_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2881,6 +2465,7 @@ def test_delete_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.delete_api] = mock_rpc + request = {} client.delete_api(request) @@ -2894,185 +2479,200 @@ def test_delete_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest() - - -@pytest.mark.asyncio -async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_delete_api_rest_required_fields(request_type=apihub_service.DeleteApiRequest): + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. 
+ return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_api(request) + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteApiRequest() - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the response is the type that we expect. - assert response is None + response = client.delete_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_api_async_from_dict(): - await test_delete_api_async(request_type=dict) +def test_delete_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.delete_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) -def test_delete_api_field_headers(): - client = ApiHubClient( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteApiRequest.pb( + apihub_service.DeleteApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteApiRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value = None - client.delete_api(request) + request = apihub_service.DeleteApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -@pytest.mark.asyncio -async def test_delete_api_field_headers_async(): - client = ApiHubAsyncClient( +def test_delete_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteApiRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_api(request) -def test_delete_api_flattened(): +def test_delete_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -def test_delete_api_flattened_error(): +def test_delete_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3084,47 +2684,11 @@ def test_delete_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_delete_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_api( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_api_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_api( - apihub_service.DeleteApiRequest(), - name="name_value", - ) - @pytest.mark.parametrize( "request_type", @@ -3133,20 +2697,118 @@ async def test_delete_api_flattened_error_async(): dict, ], ) -def test_create_version(request_type, transport: str = "grpc"): +def test_create_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request_init["version"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + "selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Version( + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del request_init["version"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -3156,13 +2818,17 @@ def test_create_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.create_version(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_version(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Version) @@ -3176,62 +2842,13 @@ def test_create_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_create_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest() - - -def test_create_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateVersionRequest( - parent="parent_value", - version_id="version_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest( - parent="parent_value", - version_id="version_id_value", - ) - - -def test_create_version_use_cached_wrapped_rpc(): +def test_create_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3247,6 +2864,7 @@ def test_create_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_version] = mock_rpc + request = {} client.create_version(request) @@ -3260,287 +2878,233 @@ def test_create_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.create_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest() - - -@pytest.mark.asyncio -async def test_create_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_version_rest_required_fields( + request_type=apihub_service.CreateVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_version - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_version - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.create_version(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_version(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("version_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.create_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_version(request) -@pytest.mark.asyncio -async def test_create_version_async_from_dict(): - await test_create_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateVersionRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value = common_fields.Version() - client.create_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("versionId",)) + & set( + ( + "parent", + "version", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateVersionRequest.pb( + apihub_service.CreateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateVersionRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.create_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -def test_create_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
client.create_version( - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].version_id - mock_val = "version_id_value" - assert arg == mock_val - - -def test_create_version_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_version( - apihub_service.CreateVersionRequest(), - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_version(request) -@pytest.mark.asyncio -async def test_create_version_flattened_async(): - client = ApiHubAsyncClient( +def test_create_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_version( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", version=common_fields.Version(name="name_value"), version_id="version_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].version_id - mock_val = "version_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_version_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) + + +def test_create_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_version( + client.create_version( apihub_service.CreateVersionRequest(), parent="parent_value", version=common_fields.Version(name="name_value"), @@ -3548,6 +3112,12 @@ async def test_create_version_flattened_error_async(): ) +def test_create_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3555,20 +3125,22 @@ async def test_create_version_flattened_error_async(): dict, ], ) -def test_get_version(request_type, transport: str = "grpc"): +def test_get_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -3578,13 +3150,17 @@ def test_get_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.get_version(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_version(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Version) @@ -3598,60 +3174,13 @@ def test_get_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_get_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest() - - -def test_get_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetVersionRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest( - name="name_value", - ) - - -def test_get_version_use_cached_wrapped_rpc(): +def test_get_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3667,6 +3196,7 @@ def test_get_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.get_version] = mock_rpc + request = {} client.get_version(request) @@ -3680,274 +3210,235 @@ def test_get_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.get_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest() - - -@pytest.mark.asyncio -async def test_get_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_version_rest_required_fields( + request_type=apihub_service.GetVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_version - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_version - ] = mock_rpc + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.get_version(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.get_version(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + jsonified_request["name"] = "name_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_get_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetVersionRequest -): - client = ApiHubAsyncClient( + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.get_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_version(request) -@pytest.mark.asyncio -async def test_get_version_async_from_dict(): - await test_get_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value = common_fields.Version() - client.get_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetVersionRequest.pb( + apihub_service.GetVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.get_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_get_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_version( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_version_flattened_error(): +def test_get_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_version( - apihub_service.GetVersionRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_version(request) -@pytest.mark.asyncio -async def test_get_version_flattened_async(): - client = ApiHubAsyncClient( +def test_get_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Version() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_version( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_version_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_version( + client.get_version( apihub_service.GetVersionRequest(), name="name_value", ) +def test_get_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3955,93 +3446,46 @@ async def test_get_version_flattened_error_async(): dict, ], ) -def test_list_versions(request_type, transport: str = "grpc"): +def test_list_versions_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse( next_page_token="next_page_token_value", ) - response = client.list_versions(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListVersionsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_versions(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListVersionsPager) assert response.next_page_token == "next_page_token_value" -def test_list_versions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_versions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest() - - -def test_list_versions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListVersionsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_versions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_versions_use_cached_wrapped_rpc(): +def test_list_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -4057,6 +3501,7 @@ def test_list_versions_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc + request = {} client.list_versions(request) @@ -4070,263 +3515,254 @@ def test_list_versions_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_versions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_versions_rest_required_fields( + request_type=apihub_service.ListVersionsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_versions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_versions_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_versions - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_versions - ] = mock_rpc - - request = {} - await client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_versions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_versions_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListVersionsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_versions(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListVersionsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListVersionsAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_versions(request) -@pytest.mark.asyncio -async def test_list_versions_async_from_dict(): - await test_list_versions_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_versions_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_versions_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.ListVersionsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value = apihub_service.ListVersionsResponse() - client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_versions_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_versions_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_versions" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListVersionsRequest.pb( + apihub_service.ListVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListVersionsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListVersionsResponse.to_json( apihub_service.ListVersionsResponse() ) - await client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + request = apihub_service.ListVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListVersionsResponse() -def test_list_versions_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.ListVersionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_versions( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_versions_flattened_error(): +def test_list_versions_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListVersionsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_versions( - apihub_service.ListVersionsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_versions(request) -@pytest.mark.asyncio -async def test_list_versions_flattened_async(): - client = ApiHubAsyncClient( +def test_list_versions_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_versions( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_versions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_versions_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_versions_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_versions( + client.list_versions( apihub_service.ListVersionsRequest(), parent="parent_value", ) -def test_list_versions_pager(transport_name: str = "grpc"): +def test_list_versions_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListVersionsResponse( versions=[ common_fields.Version(), @@ -4351,164 +3787,31 @@ def test_list_versions_pager(transport_name: str = "grpc"): common_fields.Version(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListVersionsResponse.to_json(x) for x in response ) - pager = client.list_versions(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_versions(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Version) for i in results) - -def test_list_versions_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - pages = list(client.list_versions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_versions_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_versions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_versions( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Version) for i in responses) - - -@pytest.mark.asyncio -async def test_list_versions_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_versions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_versions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + pages = list(client.list_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( @@ -4518,20 +3821,122 @@ async def test_list_versions_async_pages(): dict, ], ) -def test_update_version(request_type, transport: str = "grpc"): +def test_update_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request_init["version"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + "selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del request_init["version"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -4541,13 +3946,17 @@ def test_update_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.update_version(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_version(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Version) @@ -4561,56 +3970,13 @@ def test_update_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_update_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -def test_update_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateVersionRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -def test_update_version_use_cached_wrapped_rpc(): +def test_update_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -4626,6 +3992,7 @@ def test_update_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_version] = mock_rpc + request = {} client.update_version(request) @@ -4639,284 +4006,247 @@ def test_update_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.update_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -@pytest.mark.asyncio -async def test_update_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_version_rest_required_fields( + request_type=apihub_service.UpdateVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_version - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_version - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_version(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_version(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.update_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_version(request) -@pytest.mark.asyncio -async def test_update_version_async_from_dict(): - await test_update_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateVersionRequest() - - request.version.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value = common_fields.Version() - client.update_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "version.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "version", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateVersionRequest.pb( + apihub_service.UpdateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateVersionRequest() - - request.version.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.update_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "version.name=name_value", - ) in kw["metadata"] - - -def test_update_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_version( - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_version_flattened_error(): +def test_update_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_version( - apihub_service.UpdateVersionRequest(), - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_version(request) -@pytest.mark.asyncio -async def test_update_version_flattened_async(): - client = ApiHubAsyncClient( +def test_update_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_version( + # get arguments that satisfy an http rule for this method + sample_request = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( version=common_fields.Version(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_version_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{version.name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_version( + client.update_version( apihub_service.UpdateVersionRequest(), version=common_fields.Version(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4924,86 +4254,43 @@ async def test_update_version_flattened_error_async(): dict, ], ) -def test_delete_version(request_type, transport: str = "grpc"): +def test_delete_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_version(request) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteVersionRequest() - assert args[0] == request + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest() - - -def test_delete_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteVersionRequest( - name="name_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_version(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest( - name="name_value", - ) + # Establish that the response is the type that we expect. 
+ assert response is None -def test_delete_version_use_cached_wrapped_rpc(): +def test_delete_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5019,6 +4306,7 @@ def test_delete_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc + request = {} client.delete_version(request) @@ -5032,240 +4320,224 @@ def test_delete_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest() - - -@pytest.mark.asyncio -async def test_delete_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_version_rest_required_fields( + request_type=apihub_service.DeleteVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_version - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_version - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_version(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_version(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_version(request) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteVersionRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the response is the type that we expect. - assert response is None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_version(request) -@pytest.mark.asyncio -async def test_delete_version_async_from_dict(): - await test_delete_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_delete_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = apihub_service.DeleteVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value = None - client.delete_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) -@pytest.mark.asyncio -async def test_delete_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_version" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteVersionRequest.pb( + apihub_service.DeleteVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteVersionRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_version(request) + request = apihub_service.DeleteVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -def test_delete_version_flattened(): +def test_delete_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_version( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_version(request) -def test_delete_version_flattened_error(): +def test_delete_version_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_version( - apihub_service.DeleteVersionRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_version_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_version( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_version_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_version( + client.delete_version( apihub_service.DeleteVersionRequest(), name="name_value", ) +def test_delete_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -5273,32 +4545,155 @@ async def test_delete_version_flattened_error_async(): dict, ], ) -def test_create_spec(request_type, transport: str = "grpc"): +def test_create_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec( - name="name_value", + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request_init["spec"] = { + "name": "name_value", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": "code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateSpecRequest.meta.fields["spec"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( + name="name_value", display_name="display_name_value", source_uri="source_uri_value", parsing_mode=common_fields.Spec.ParsingMode.RELAXED, ) - response = client.create_spec(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_spec(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Spec) @@ -5308,62 +4703,13 @@ def test_create_spec(request_type, transport: str = "grpc"): assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_create_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest() - - -def test_create_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateSpecRequest( - parent="parent_value", - spec_id="spec_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest( - parent="parent_value", - spec_id="spec_id_value", - ) - - -def test_create_spec_use_cached_wrapped_rpc(): +def test_create_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5379,6 +4725,7 @@ def test_create_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_spec] = mock_rpc + request = {} client.create_spec(request) @@ -5392,271 +4739,235 @@ def test_create_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.create_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest() - - -@pytest.mark.asyncio -async def test_create_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_spec_rest_required_fields( + request_type=apihub_service.CreateSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_spec - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_spec - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.create_spec(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_spec(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("spec_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.create_spec(request) + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateSpecRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_create_spec_async_from_dict(): - await test_create_spec_async(request_type=dict) - - -def test_create_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateSpecRequest() + response = client.create_spec(request) - request.parent = "parent_value" + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.create_spec(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_create_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("specId",)) + & set( + ( + "parent", + "spec", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateSpecRequest.pb( + apihub_service.CreateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateSpecRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.create_spec(request) + request = apihub_service.CreateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_spec_flattened(): +def test_create_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_spec( - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].spec_id - mock_val = "spec_id_value" - assert arg == mock_val - - -def test_create_spec_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_spec( - apihub_service.CreateSpecRequest(), - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_spec(request) -@pytest.mark.asyncio -async def test_create_spec_flattened_async(): - client = ApiHubAsyncClient( +def test_create_spec_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_spec( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", spec=common_fields.Spec(name="name_value"), spec_id="spec_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].spec_id - mock_val = "spec_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_spec_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) + + +def test_create_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_spec( + client.create_spec( apihub_service.CreateSpecRequest(), parent="parent_value", spec=common_fields.Spec(name="name_value"), @@ -5664,6 +4975,12 @@ async def test_create_spec_flattened_error_async(): ) +def test_create_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -5671,32 +4988,38 @@ async def test_create_spec_flattened_error_async(): dict, ], ) -def test_get_spec(request_type, transport: str = "grpc"): +def test_get_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( name="name_value", display_name="display_name_value", source_uri="source_uri_value", parsing_mode=common_fields.Spec.ParsingMode.RELAXED, ) - response = client.get_spec(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec(request) # Establish that the response is the type that we expect. 
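The *_rest_flattened tests just above verify the outgoing URL by matching it against the method's URI template with google.api_core.path_template.validate. A standalone sketch of that check; the host and sample resource path are illustrative values, not taken from the client.

from google.api_core import path_template


def test_uri_template_matches_expanded_path():
    # Same shape as the templates the flattened tests build from transport._host.
    template = (
        "https://apihub.googleapis.com/v1/"
        "{parent=projects/*/locations/*/apis/*/versions/*}/specs"
    )
    url = (
        "https://apihub.googleapis.com/v1/"
        "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs"
    )
    assert path_template.validate(template, url)

    # A URL missing path segments should not validate against the template.
    bad_url = "https://apihub.googleapis.com/v1/projects/sample1/specs"
    assert not path_template.validate(template, bad_url)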
assert isinstance(response, common_fields.Spec) @@ -5706,60 +5029,13 @@ def test_get_spec(request_type, transport: str = "grpc"): assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_get_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest() - - -def test_get_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest( - name="name_value", - ) - - -def test_get_spec_use_cached_wrapped_rpc(): +def test_get_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5775,6 +5051,7 @@ def test_get_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc + request = {} client.get_spec(request) @@ -5788,256 +5065,229 @@ def test_get_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
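The *_use_cached_wrapped_rpc tests pin down one contract: method wrappers are built once, at client construction, and later calls reuse the cached wrapper rather than wrapping again. A toy analogue of that contract under stated assumptions; wrap_method here is a local stand-in, whereas the real tests patch google.api_core.gapic_v1.method.wrap_method, and ToyTransport/ToyClient are not the generated classes.

import sys
from unittest import mock


def wrap_method(func):
    # Local stand-in for google.api_core.gapic_v1.method.wrap_method.
    def wrapped(*args, **kwargs):
        return func(*args, **kwargs)

    return wrapped


class ToyTransport:
    def __init__(self):
        # Wrappers are prepared once, mirroring _prep_wrapped_messages.
        self._wrapped_methods = {self.get_spec: wrap_method(self.get_spec)}

    def get_spec(self, request):
        return {"name": request.get("name", "")}


class ToyClient:
    def __init__(self):
        self._transport = ToyTransport()

    def get_spec(self, request):
        return self._transport._wrapped_methods[self._transport.get_spec](request)


def test_toy_client_reuses_cached_wrapper():
    module = sys.modules[__name__]
    with mock.patch.object(module, "wrap_method", wraps=wrap_method) as wrapper_fn:
        client = ToyClient()
        assert wrapper_fn.call_count == 1  # wrapping happened at construction
        wrapper_fn.reset_mock()

        # Swap the cached wrapper for a mock and verify it is what gets called.
        mock_rpc = mock.Mock(return_value={})
        client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc

        client.get_spec({})
        client.get_spec({})
        assert mock_rpc.call_count == 2
        assert wrapper_fn.call_count == 0  # no new wrapper after construction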
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.get_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest() - +def test_get_spec_rest_required_fields(request_type=apihub_service.GetSpecRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_get_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.get_spec - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_spec - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.get_spec(request) + jsonified_request["name"] = "name_value" - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.get_spec(request) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.get_spec(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec(request) -@pytest.mark.asyncio -async def test_get_spec_async_from_dict(): - await test_get_spec_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.get_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetSpecRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.get_spec(request) + request = apihub_service.GetSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_get_spec_flattened(): +def test_get_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_spec( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. 
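The *_rest_interceptors tests above check the interceptor contract: the pre_* hook sees each request and its metadata, the post_* hook sees each response, each exactly once per call. A miniature analogue of that contract; ToyInterceptor and ToyRestClient are illustrative stand-ins, not the generated ApiHubRestInterceptor or client.

from unittest import mock


class ToyInterceptor:
    def pre_get_spec(self, request, metadata):
        return request, metadata

    def post_get_spec(self, response):
        return response


class ToyRestClient:
    def __init__(self, interceptor=None):
        self._interceptor = interceptor or ToyInterceptor()

    def get_spec(self, request, metadata=()):
        request, metadata = self._interceptor.pre_get_spec(request, metadata)
        response = {"name": request["name"]}  # pretend HTTP round trip
        return self._interceptor.post_get_spec(response)


def test_toy_interceptor_hooks_called_once():
    with mock.patch.object(
        ToyInterceptor, "pre_get_spec", side_effect=lambda r, m: (r, m)
    ) as pre, mock.patch.object(
        ToyInterceptor, "post_get_spec", side_effect=lambda r: r
    ) as post:
        client = ToyRestClient()
        response = client.get_spec({"name": "name_value"}, metadata=[("key", "val")])

    assert response == {"name": "name_value"}
    pre.assert_called_once()
    post.assert_called_once()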
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec(request) -def test_get_spec_flattened_error(): +def test_get_spec_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_spec( - apihub_service.GetSpecRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_spec_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_spec( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_spec_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_spec( + client.get_spec( apihub_service.GetSpecRequest(), name="name_value", ) +def test_get_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -6045,32 +5295,36 @@ async def test_get_spec_flattened_error_async(): dict, ], ) -def test_get_spec_contents(request_type, transport: str = "grpc"): +def test_get_spec_contents_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents( contents=b"contents_blob", mime_type="mime_type_value", ) - response = client.get_spec_contents(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecContentsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec_contents(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.SpecContents) @@ -6078,64 +5332,13 @@ def test_get_spec_contents(request_type, transport: str = "grpc"): assert response.mime_type == "mime_type_value" -def test_get_spec_contents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest() - - -def test_get_spec_contents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetSpecContentsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec_contents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest( - name="name_value", - ) - - -def test_get_spec_contents_use_cached_wrapped_rpc(): +def test_get_spec_contents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -6153,6 +5356,7 @@ def test_get_spec_contents_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_spec_contents ] = mock_rpc + request = {} client.get_spec_contents(request) @@ -6166,268 +5370,235 @@ def test_get_spec_contents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_spec_contents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_get_spec_contents_rest_required_fields( + request_type=apihub_service.GetSpecContentsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_spec_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_get_spec_contents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_spec_contents - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_spec_contents - ] = mock_rpc - - request = {} - await client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_spec_contents(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_spec_contents_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetSpecContentsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_spec_contents(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecContentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.SpecContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec_contents(request) -@pytest.mark.asyncio -async def test_get_spec_contents_async_from_dict(): - await test_get_spec_contents_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_spec_contents_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_spec_contents_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetSpecContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value = common_fields.SpecContents() - client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_spec_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_spec_contents_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_contents_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetSpecContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec_contents" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecContentsRequest.pb( + apihub_service.GetSpecContentsRequest() ) - await client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.SpecContents.to_json( + common_fields.SpecContents() + ) -def test_get_spec_contents_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetSpecContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.SpecContents() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_spec_contents( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_spec_contents_flattened_error(): +def test_get_spec_contents_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecContentsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_spec_contents( - apihub_service.GetSpecContentsRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec_contents(request) -@pytest.mark.asyncio -async def test_get_spec_contents_flattened_async(): - client = ApiHubAsyncClient( +def test_get_spec_contents_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_spec_contents( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec_contents(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_spec_contents_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_spec_contents_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_spec_contents( + client.get_spec_contents( apihub_service.GetSpecContentsRequest(), name="name_value", ) +def test_get_spec_contents_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -6435,93 +5606,48 @@ async def test_get_spec_contents_flattened_error_async(): dict, ], ) -def test_list_specs(request_type, transport: str = "grpc"): +def test_list_specs_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse( next_page_token="next_page_token_value", ) - response = client.list_specs(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListSpecsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_specs(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSpecsPager) assert response.next_page_token == "next_page_token_value" -def test_list_specs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_specs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest() - - -def test_list_specs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListSpecsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_specs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_specs_use_cached_wrapped_rpc(): +def test_list_specs_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -6537,6 +5663,7 @@ def test_list_specs_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_specs] = mock_rpc + request = {} client.list_specs(request) @@ -6550,196 +5677,233 @@ def test_list_specs_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_specs_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_specs_rest_required_fields(request_type=apihub_service.ListSpecsRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_specs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_specs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_specs - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_specs - ] = mock_rpc - - request = {} - await client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - await client.list_specs(request) + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.asyncio -async def test_list_specs_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListSpecsRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_specs(request) + response = client.list_specs(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListSpecsRequest() - assert args[0] == request + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSpecsAsyncPager) - assert response.next_page_token == "next_page_token_value" +def test_list_specs_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -@pytest.mark.asyncio -async def test_list_specs_async_from_dict(): - await test_list_specs_async(request_type=dict) + unset_fields = transport.list_specs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_list_specs_field_headers(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_specs_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_specs" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_specs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListSpecsRequest.pb( + apihub_service.ListSpecsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.ListSpecsRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListSpecsResponse.to_json( + apihub_service.ListSpecsResponse() + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value = apihub_service.ListSpecsResponse() - client.list_specs(request) + request = apihub_service.ListSpecsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListSpecsResponse() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.list_specs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_list_specs_field_headers_async(): - client = ApiHubAsyncClient( +def test_list_specs_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListSpecsRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListSpecsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse() - ) - await client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_specs(request) -def test_list_specs_flattened(): +def test_list_specs_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_specs( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_specs(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) -def test_list_specs_flattened_error(): +def test_list_specs_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -6751,60 +5915,18 @@ def test_list_specs_flattened_error(): ) -@pytest.mark.asyncio -async def test_list_specs_flattened_async(): - client = ApiHubAsyncClient( +def test_list_specs_rest_pager(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_specs( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_specs_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_specs( - apihub_service.ListSpecsRequest(), - parent="parent_value", - ) - - -def test_list_specs_pager(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListSpecsResponse( specs=[ common_fields.Spec(), @@ -6829,162 +5951,29 @@ def test_list_specs_pager(transport_name: str = "grpc"): common_fields.Spec(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_specs(request={}, retry=retry, timeout=timeout) + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListSpecsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + pager = client.list_specs(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Spec) for i in results) - -def test_list_specs_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - pages = list(client.list_specs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_specs_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_specs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_specs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Spec) for i in responses) - - -@pytest.mark.asyncio -async def test_list_specs_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_specs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_specs(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_specs(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -6996,91 +5985,173 @@ async def test_list_specs_async_pages(): dict, ], ) -def test_update_spec(request_type, transport: str = "grpc"): +def test_update_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - response = client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateSpecRequest() - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request_init["spec"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": "code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateSpecRequest.meta.fields["spec"] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] -def test_update_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_update_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + subfields_not_in_runtime = [] - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateSpecRequest() + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Spec( + name="name_value", + display_name="display_name_value", + source_uri="source_uri_value", + parsing_mode=common_fields.Spec.ParsingMode.RELAXED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_spec(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Spec) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_uri == "source_uri_value" + assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_update_spec_use_cached_wrapped_rpc(): +def test_update_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7096,6 +6167,7 @@ def test_update_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_spec] = mock_rpc + request = {} client.update_spec(request) @@ -7109,268 +6181,245 @@ def test_update_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.update_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() - - -@pytest.mark.asyncio -async def test_update_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_spec_rest_required_fields( + request_type=apihub_service.UpdateSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_spec - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_spec - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_spec(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_spec(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.update_spec(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateSpecRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_spec(request) -@pytest.mark.asyncio -async def test_update_spec_async_from_dict(): - await test_update_spec_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateSpecRequest() - - request.spec.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "spec.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "spec", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateSpecRequest.pb( + apihub_service.UpdateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateSpecRequest() - - request.spec.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.update_spec(request) + request = apihub_service.UpdateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.update_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "spec.name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_update_spec_flattened(): +def test_update_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_spec( - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_spec(request) -def test_update_spec_flattened_error(): +def test_update_spec_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_spec( - apihub_service.UpdateSpecRequest(), - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_spec_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() + # get arguments that satisfy an http rule for this method + sample_request = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_spec( + # get truthy value for each flattened field + mock_args = dict( spec=common_fields.Spec(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_spec_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_spec( + client.update_spec( apihub_service.UpdateSpecRequest(), spec=common_fields.Spec(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -7378,86 +6427,43 @@ async def test_update_spec_flattened_error_async(): dict, ], ) -def test_delete_spec(request_type, transport: str = "grpc"): +def test_delete_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_spec(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_spec(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest() - - -def test_delete_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest( - name="name_value", - ) - - -def test_delete_spec_use_cached_wrapped_rpc(): +def test_delete_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7473,6 +6479,7 @@ def test_delete_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.delete_spec] = mock_rpc + request = {} client.delete_spec(request) @@ -7486,240 +6493,222 @@ def test_delete_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest() - - -@pytest.mark.asyncio -async def test_delete_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_spec_rest_required_fields( + request_type=apihub_service.DeleteSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_spec - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_spec - ] = mock_rpc - - request = {} - await client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteSpecRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_spec(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteSpecRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_spec_async_from_dict(): - await test_delete_spec_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_spec_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteSpecRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value = None - client.delete_spec(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_spec(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_spec_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_spec_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_spec" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteSpecRequest.pb( + apihub_service.DeleteSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.delete_spec( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_spec_flattened_error(): +def test_delete_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_spec( - apihub_service.DeleteSpecRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_spec_flattened_async(): - client = ApiHubAsyncClient( + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_spec(request) + + +def test_delete_spec_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_spec( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_spec_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_spec( + client.delete_spec( apihub_service.DeleteSpecRequest(), name="name_value", ) +def test_delete_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -7727,32 +6716,36 @@ async def test_delete_spec_flattened_error_async(): dict, ], ) -def test_get_api_operation(request_type, transport: str = "grpc"): +def test_get_api_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation( name="name_value", spec="spec_value", ) - response = client.get_api_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiOperationRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_operation(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ApiOperation) @@ -7760,64 +6753,13 @@ def test_get_api_operation(request_type, transport: str = "grpc"): assert response.spec == "spec_value" -def test_get_api_operation_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api_operation() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest() - - -def test_get_api_operation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetApiOperationRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_api_operation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest( - name="name_value", - ) - - -def test_get_api_operation_use_cached_wrapped_rpc(): +def test_get_api_operation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7835,6 +6777,7 @@ def test_get_api_operation_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_api_operation ] = mock_rpc + request = {} client.get_api_operation(request) @@ -7848,268 +6791,235 @@ def test_get_api_operation_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_operation_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - ) - response = await client.get_api_operation() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest() - - -@pytest.mark.asyncio -async def test_get_api_operation_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_api_operation_rest_required_fields( + request_type=apihub_service.GetApiOperationRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_api_operation - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api_operation - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_api_operation(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api_operation(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_operation_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetApiOperationRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - ) - response = await client.get_api_operation(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiOperationRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiOperation) - assert response.name == "name_value" - assert response.spec == "spec_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_operation(request) -@pytest.mark.asyncio -async def test_get_api_operation_async_from_dict(): - await test_get_api_operation_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_operation_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiOperationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value = common_fields.ApiOperation() - client.get_api_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api_operation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_operation_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_operation_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiOperationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api_operation" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api_operation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiOperationRequest.pb( + apihub_service.GetApiOperationRequest() ) - await client.get_api_operation(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiOperation.to_json( + common_fields.ApiOperation() + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_api_operation_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetApiOperationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiOperation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_api_operation( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_api_operation_flattened_error(): +def test_get_api_operation_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiOperationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api_operation( - apihub_service.GetApiOperationRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_operation(request) -@pytest.mark.asyncio -async def test_get_api_operation_flattened_async(): - client = ApiHubAsyncClient( +def test_get_api_operation_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ApiOperation() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_api_operation( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_operation(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_operation_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_api_operation_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_api_operation( + client.get_api_operation( apihub_service.GetApiOperationRequest(), name="name_value", ) +def test_get_api_operation_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -8117,99 +7027,48 @@ async def test_get_api_operation_flattened_error_async(): dict, ], ) -def test_list_api_operations(request_type, transport: str = "grpc"): +def test_list_api_operations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListApiOperationsResponse( next_page_token="next_page_token_value", ) - response = client.list_api_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApiOperationsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_api_operations(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListApiOperationsPager) assert response.next_page_token == "next_page_token_value" -def test_list_api_operations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_api_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest() - - -def test_list_api_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListApiOperationsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_api_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_api_operations_use_cached_wrapped_rpc(): +def test_list_api_operations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -8229,6 +7088,7 @@ def test_list_api_operations_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_api_operations ] = mock_rpc + request = {} client.list_api_operations(request) @@ -8242,278 +7102,258 @@ def test_list_api_operations_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_api_operations_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_api_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest() - - -@pytest.mark.asyncio -async def test_list_api_operations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_api_operations_rest_required_fields( + request_type=apihub_service.ListApiOperationsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_api_operations - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_api_operations - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_api_operations(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_api_operations._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_api_operations(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_api_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_api_operations_async( - transport: str = "grpc_asyncio", - request_type=apihub_service.ListApiOperationsRequest, -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_api_operations(request) + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApiOperationsRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApiOperationsAsyncPager) - assert response.next_page_token == "next_page_token_value" + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_api_operations_async_from_dict(): - await test_list_api_operations_async(request_type=dict) + response = client.list_api_operations(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_api_operations_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApiOperationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value = apihub_service.ListApiOperationsResponse() - client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_list_api_operations_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_api_operations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_api_operations_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_api_operations_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApiOperationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_api_operations" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_api_operations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApiOperationsRequest.pb( + apihub_service.ListApiOperationsRequest() ) - await client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApiOperationsResponse.to_json( + apihub_service.ListApiOperationsResponse() + ) -def test_list_api_operations_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListApiOperationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApiOperationsResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_api_operations( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_api_operations_flattened_error(): +def test_list_api_operations_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApiOperationsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_api_operations( - apihub_service.ListApiOperationsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_api_operations(request) -@pytest.mark.asyncio -async def test_list_api_operations_flattened_async(): - client = ApiHubAsyncClient( + +def test_list_api_operations_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_api_operations( + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_api_operations(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_api_operations_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_api_operations_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_api_operations( + client.list_api_operations( apihub_service.ListApiOperationsRequest(), parent="parent_value", ) -def test_list_api_operations_pager(transport_name: str = "grpc"): +def test_list_api_operations_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListApiOperationsResponse( api_operations=[ common_fields.ApiOperation(), @@ -8538,205 +7378,74 @@ def test_list_api_operations_pager(transport_name: str = "grpc"): common_fields.ApiOperation(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListApiOperationsResponse.to_json(x) for x in response ) - pager = client.list_api_operations(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_api_operations(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.ApiOperation) for i in results) - -def test_list_api_operations_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_api_operations(request={}).pages) + pages = list(client.list_api_operations(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_api_operations_async_pager(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetDefinitionRequest, + dict, + ], +) +def test_get_definition_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_api_operations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.ApiOperation) for i in responses) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition( + name="name_value", + spec="spec_value", + type_=common_fields.Definition.Type.SCHEMA, + ) -@pytest.mark.asyncio -async def test_list_api_operations_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_api_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDefinitionRequest, - dict, - ], -) -def test_get_definition(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.get_definition(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDefinitionRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Definition) assert response.name == "name_value" @@ -8744,60 +7453,13 @@ def test_get_definition(request_type, transport: str = "grpc"): assert response.type_ == common_fields.Definition.Type.SCHEMA -def test_get_definition_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_definition() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest() - - -def test_get_definition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetDefinitionRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_definition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest( - name="name_value", - ) - - -def test_get_definition_use_cached_wrapped_rpc(): +def test_get_definition_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -8813,6 +7475,7 @@ def test_get_definition_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_definition] = mock_rpc + request = {} client.get_definition(request) @@ -8826,259 +7489,235 @@ def test_get_definition_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_definition_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - ) - response = await client.get_definition() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest() - - -@pytest.mark.asyncio -async def test_get_definition_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_definition_rest_required_fields( + request_type=apihub_service.GetDefinitionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_definition - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_definition - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_definition(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub 
method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_definition(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_definition_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDefinitionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - ) - response = await client.get_definition(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDefinitionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Definition) - assert response.name == "name_value" - assert response.spec == "spec_value" - assert response.type_ == common_fields.Definition.Type.SCHEMA + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_definition(request) -@pytest.mark.asyncio -async def test_get_definition_async_from_dict(): - await test_get_definition_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_definition_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_definition_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDefinitionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value = common_fields.Definition() - client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_definition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_definition_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_definition_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_definition" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_definition" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDefinitionRequest.pb( + apihub_service.GetDefinitionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDefinitionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Definition.to_json( common_fields.Definition() ) - await client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_definition_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetDefinitionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Definition() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Definition() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_definition( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_definition_flattened_error(): +def test_get_definition_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDefinitionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_definition( - apihub_service.GetDefinitionRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_definition(request) -@pytest.mark.asyncio -async def test_get_definition_flattened_async(): - client = ApiHubAsyncClient( +def test_get_definition_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Definition() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_definition( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_definition(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_definition_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_definition_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_definition( + client.get_definition( apihub_service.GetDefinitionRequest(), name="name_value", ) +def test_get_definition_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9086,22 +7725,116 @@ async def test_get_definition_flattened_error_async(): dict, ], ) -def test_create_deployment(request_type, transport: str = "grpc"): +def test_create_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( name="name_value", display_name="display_name_value", description="description_value", @@ -9109,13 +7842,17 @@ def test_create_deployment(request_type, transport: str = "grpc"): endpoints=["endpoints_value"], api_versions=["api_versions_value"], ) - response = client.create_deployment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Deployment) @@ -9127,66 +7864,13 @@ def test_create_deployment(request_type, transport: str = "grpc"): assert response.api_versions == ["api_versions_value"] -def test_create_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest() - - -def test_create_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateDeploymentRequest( - parent="parent_value", - deployment_id="deployment_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest( - parent="parent_value", - deployment_id="deployment_id_value", - ) - - -def test_create_deployment_use_cached_wrapped_rpc(): +def test_create_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -9204,6 +7888,7 @@ def test_create_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_deployment ] = mock_rpc + request = {} client.create_deployment(request) @@ -9217,293 +7902,233 @@ def test_create_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_deployment_rest_required_fields( + request_type=apihub_service.CreateDeploymentRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.create_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.create_deployment - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("deployment_id",)) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_deployment - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - request = {} - await client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.create_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.create_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) -@pytest.mark.asyncio -async def test_create_deployment_async_from_dict(): - await test_create_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDeploymentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value = common_fields.Deployment() - client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("deploymentId",)) + & set( + ( + "parent", + "deployment", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDeploymentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDeploymentRequest.pb( + apihub_service.CreateDeploymentRequest() ) - await client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) -def test_create_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_deployment( - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].deployment_id - mock_val = "deployment_id_value" - assert arg == mock_val - - -def test_create_deployment_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deployment( - apihub_service.CreateDeploymentRequest(), - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) -@pytest.mark.asyncio -async def test_create_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_create_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deployment( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", deployment=common_fields.Deployment(name="name_value"), deployment_id="deployment_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].deployment_id - mock_val = "deployment_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_deployment_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_deployment( + client.create_deployment( apihub_service.CreateDeploymentRequest(), parent="parent_value", deployment=common_fields.Deployment(name="name_value"), @@ -9511,6 +8136,12 @@ async def test_create_deployment_flattened_error_async(): ) +def test_create_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9518,20 +8149,20 @@ async def test_create_deployment_flattened_error_async(): dict, ], ) -def test_get_deployment(request_type, transport: str = "grpc"): +def test_get_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( name="name_value", display_name="display_name_value", description="description_value", @@ -9539,13 +8170,17 @@ def test_get_deployment(request_type, transport: str = "grpc"): endpoints=["endpoints_value"], api_versions=["api_versions_value"], ) - response = client.get_deployment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Deployment) @@ -9557,65 +8192,18 @@ def test_get_deployment(request_type, transport: str = "grpc"): assert response.api_versions == ["api_versions_value"] -def test_get_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. +def test_get_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client.get_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest() - -def test_get_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.GetDeploymentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest( - name="name_value", - ) - - -def test_get_deployment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.get_deployment in client._transport._wrapped_methods @@ -9626,6 +8214,7 @@ def test_get_deployment_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc + request = {} client.get_deployment(request) @@ -9639,362 +8228,280 @@ def test_get_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.get_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest() - - -@pytest.mark.asyncio -async def test_get_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_deployment_rest_required_fields( + request_type=apihub_service.GetDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_deployment - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_deployment - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_deployment(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_deployment(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.get_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) -@pytest.mark.asyncio -async def test_get_deployment_async_from_dict(): - await test_get_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value = common_fields.Deployment() - client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDeploymentRequest.pb( + apihub_service.GetDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( common_fields.Deployment() ) - await client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_get_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_deployment( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_deployment_flattened_error(): +def test_get_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deployment( - apihub_service.GetDeploymentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) -@pytest.mark.asyncio -async def test_get_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_get_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deployment( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_deployment_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_deployment( + client.get_deployment( apihub_service.GetDeploymentRequest(), name="name_value", ) -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDeploymentsRequest, +def test_get_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListDeploymentsRequest, dict, ], ) -def test_list_deployments(request_type, transport: str = "grpc"): +def test_list_deployments_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDeploymentsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse( next_page_token="next_page_token_value", ) - response = client.list_deployments(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDeploymentsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDeploymentsPager) assert response.next_page_token == "next_page_token_value" -def test_list_deployments_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_deployments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest() - - -def test_list_deployments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListDeploymentsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_deployments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_deployments_use_cached_wrapped_rpc(): +def test_list_deployments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10012,6 +8519,7 @@ def test_list_deployments_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_deployments ] = mock_rpc + request = {} client.list_deployments(request) @@ -10025,263 +8533,254 @@ def test_list_deployments_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_deployments_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_deployments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest() - - -@pytest.mark.asyncio -async def test_list_deployments_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_deployments_rest_required_fields( + request_type=apihub_service.ListDeploymentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_deployments - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_deployments - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_deployments(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_deployments(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_deployments_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListDeploymentsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_deployments(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDeploymentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeploymentsAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) -@pytest.mark.asyncio -async def test_list_deployments_async_from_dict(): - await test_list_deployments_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_deployments_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDeploymentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value = apihub_service.ListDeploymentsResponse() - client.list_deployments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_deployments_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deployments_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_deployments" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDeploymentsRequest.pb( + apihub_service.ListDeploymentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDeploymentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDeploymentsResponse.to_json( apihub_service.ListDeploymentsResponse() ) - await client.list_deployments(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = apihub_service.ListDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDeploymentsResponse() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + client.list_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + pre.assert_called_once() + post.assert_called_once() -def test_list_deployments_flattened(): + +def test_list_deployments_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDeploymentsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDeploymentsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_deployments( - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) -def test_list_deployments_flattened_error(): +def test_list_deployments_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deployments( - apihub_service.ListDeploymentsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_deployments_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDeploymentsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_deployments( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_deployments_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_deployments( + client.list_deployments( apihub_service.ListDeploymentsRequest(), parent="parent_value", ) -def test_list_deployments_pager(transport_name: str = "grpc"): +def test_list_deployments_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListDeploymentsResponse( deployments=[ common_fields.Deployment(), @@ -10306,162 +8805,29 @@ def test_list_deployments_pager(transport_name: str = "grpc"): common_fields.Deployment(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDeploymentsResponse.to_json(x) for x in response ) - pager = client.list_deployments(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_deployments(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Deployment) for i in results) - -def test_list_deployments_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - pages = list(client.list_deployments(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_deployments_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_deployments( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Deployment) for i in responses) - - -@pytest.mark.asyncio -async def test_list_deployments_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_deployments(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_deployments(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10473,101 +8839,154 @@ async def test_list_deployments_async_pages(): dict, ], ) -def test_update_deployment(request_type, transport: str = "grpc"): +def test_update_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - response = client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDeploymentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "name": "projects/sample1/locations/sample2/deployments/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDeploymentRequest.meta.fields["deployment"] -def test_update_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_update_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateDeploymentRequest() + subfields_not_in_runtime = [] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( + name="name_value", + display_name="display_name_value", + description="description_value", + resource_uri="resource_uri_value", + endpoints=["endpoints_value"], + api_versions=["api_versions_value"], ) - client.update_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Deployment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.resource_uri == "resource_uri_value" + assert response.endpoints == ["endpoints_value"] + assert response.api_versions == ["api_versions_value"] -def test_update_deployment_use_cached_wrapped_rpc(): +def test_update_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10585,6 +9004,7 @@ def test_update_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_deployment ] = mock_rpc + request = {} client.update_deployment(request) @@ -10598,290 +9018,245 @@ def test_update_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.update_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() - - -@pytest.mark.asyncio -async def test_update_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_deployment_rest_required_fields( + request_type=apihub_service.UpdateDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_deployment - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_deployment - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_deployment(request) + # verify fields with default values 
are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_deployment(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.update_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) -@pytest.mark.asyncio -async def test_update_deployment_async_from_dict(): - await test_update_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDeploymentRequest() - - request.deployment.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value = common_fields.Deployment() - client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "deployment.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "deployment", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDeploymentRequest() - - request.deployment.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDeploymentRequest.pb( + apihub_service.UpdateDeploymentRequest() ) - await client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "deployment.name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) -def test_update_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_deployment( - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_deployment_flattened_error(): +def test_update_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deployment( - apihub_service.UpdateDeploymentRequest(), - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deployment(request) -@pytest.mark.asyncio -async def test_update_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_update_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deployment( + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( deployment=common_fields.Deployment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_deployment_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_deployment( + client.update_deployment( apihub_service.UpdateDeploymentRequest(), deployment=common_fields.Deployment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -10889,92 +9264,41 @@ async def test_update_deployment_flattened_error_async(): dict, ], ) -def test_delete_deployment(request_type, transport: str = "grpc"): +def test_delete_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deployment(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest() - - -def test_delete_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteDeploymentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest( - name="name_value", - ) - - -def test_delete_deployment_use_cached_wrapped_rpc(): +def test_delete_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10992,6 +9316,7 @@ def test_delete_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_deployment ] = mock_rpc + request = {} client.delete_deployment(request) @@ -11005,252 +9330,220 @@ def test_delete_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest() - - -@pytest.mark.asyncio -async def test_delete_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_deployment_rest_required_fields( + request_type=apihub_service.DeleteDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_deployment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_deployment - ] = mock_rpc - - request = {} - await client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteDeploymentRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deployment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDeploymentRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_deployment_async_from_dict(): - await test_delete_deployment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_deployment_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDeploymentRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value = None - client.delete_deployment(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_deployment(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_deployment_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_deployment_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_deployment( - name="name_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDeploymentRequest.pb( + apihub_service.DeleteDeploymentRequest() ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() -def test_delete_deployment_flattened_error(): +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deployment( - apihub_service.DeleteDeploymentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) -@pytest.mark.asyncio -async def test_delete_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_deployment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_deployment_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_deployment( + client.delete_deployment( apihub_service.DeleteDeploymentRequest(), name="name_value", ) +def test_delete_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -11258,20 +9551,107 @@ async def test_delete_deployment_flattened_error_async(): dict, ], ) -def test_create_attribute(request_type, transport: str = "grpc"): +def test_create_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["attribute"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del request_init["attribute"][field][i][subfield] + else: + del request_init["attribute"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -11281,13 +9661,17 @@ def test_create_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.create_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_attribute(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Attribute) @@ -11304,62 +9688,13 @@ def test_create_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_create_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest() - - -def test_create_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateAttributeRequest( - parent="parent_value", - attribute_id="attribute_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest( - parent="parent_value", - attribute_id="attribute_id_value", - ) - - -def test_create_attribute_use_cached_wrapped_rpc(): +def test_create_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -11377,6 +9712,7 @@ def test_create_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_attribute ] = mock_rpc + request = {} client.create_attribute(request) @@ -11390,290 +9726,232 @@ def test_create_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.create_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest() +def test_create_attribute_rest_required_fields( + request_type=apihub_service.CreateAttributeRequest, +): + transport_class = transports.ApiHubRestTransport + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) -@pytest.mark.asyncio -async def test_create_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify fields with default values are dropped - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Ensure method has been cached - assert ( - client._client._transport.create_attribute - in client._client._transport._wrapped_methods - ) + # verify required fields with default values are now present - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_attribute - ] = mock_rpc - - request = {} - await client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_attribute(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("attribute_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.create_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_attribute(request) -@pytest.mark.asyncio -async def test_create_attribute_async_from_dict(): - await test_create_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateAttributeRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("attributeId",)) + & set( + ( + "parent", + "attribute", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_attribute_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateAttributeRequest.pb( + apihub_service.CreateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateAttributeRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
client.create_attribute( - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].attribute_id - mock_val = "attribute_id_value" - assert arg == mock_val - - -def test_create_attribute_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_attribute( - apihub_service.CreateAttributeRequest(), - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_attribute(request) -@pytest.mark.asyncio -async def test_create_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_create_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_attribute( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", attribute=common_fields.Attribute(name="name_value"), attribute_id="attribute_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].attribute_id - mock_val = "attribute_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_attribute_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) + + +def test_create_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_attribute( + client.create_attribute( apihub_service.CreateAttributeRequest(), parent="parent_value", attribute=common_fields.Attribute(name="name_value"), @@ -11681,6 +9959,12 @@ async def test_create_attribute_flattened_error_async(): ) +def test_create_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -11688,20 +9972,20 @@ async def test_create_attribute_flattened_error_async(): dict, ], ) -def test_get_attribute(request_type, transport: str = "grpc"): +def test_get_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -11711,13 +9995,17 @@ def test_get_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.get_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -11734,60 +10022,13 @@ def test_get_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_get_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest() - - -def test_get_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetAttributeRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest( - name="name_value", - ) - - -def test_get_attribute_use_cached_wrapped_rpc(): +def test_get_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -11803,6 +10044,7 @@ def test_get_attribute_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.get_attribute] = mock_rpc + request = {} client.get_attribute(request) @@ -11816,277 +10058,232 @@ def test_get_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.get_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest() - - -@pytest.mark.asyncio -async def test_get_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_attribute_rest_required_fields( + request_type=apihub_service.GetAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_attribute - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_attribute - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_attribute(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_attribute(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.get_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attribute(request) -@pytest.mark.asyncio -async def test_get_attribute_async_from_dict(): - await test_get_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_attribute_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetAttributeRequest.pb( + apihub_service.GetAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_attribute( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_attribute_flattened_error(): +def test_get_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_attribute( - apihub_service.GetAttributeRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_attribute(request) -@pytest.mark.asyncio -async def test_get_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_get_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_attribute( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_attribute_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_attribute( + client.get_attribute( apihub_service.GetAttributeRequest(), name="name_value", ) +def test_get_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12094,20 +10291,109 @@ async def test_get_attribute_flattened_error_async(): dict, ], ) -def test_update_attribute(request_type, transport: str = "grpc"): +def test_update_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request_init["attribute"] = { + "name": "projects/sample1/locations/sample2/attributes/sample3", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del request_init["attribute"][field][i][subfield] + else: + del request_init["attribute"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -12117,13 +10403,17 @@ def test_update_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.update_attribute(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -12140,56 +10430,13 @@ def test_update_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_update_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -def test_update_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateAttributeRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -def test_update_attribute_use_cached_wrapped_rpc(): +def test_update_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12207,6 +10454,7 @@ def test_update_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_attribute ] = mock_rpc + request = {} client.update_attribute(request) @@ -12220,287 +10468,245 @@ def test_update_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.update_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -@pytest.mark.asyncio -async def test_update_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_attribute_rest_required_fields( + request_type=apihub_service.UpdateAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_attribute - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_attribute - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_attribute(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_attribute(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.update_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_attribute(request) -@pytest.mark.asyncio -async def test_update_attribute_async_from_dict(): - await test_update_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateAttributeRequest() - - request.attribute.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.update_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "attribute.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "attribute", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_attribute_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateAttributeRequest.pb( + apihub_service.UpdateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.UpdateAttributeRequest() - - request.attribute.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.update_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "attribute.name=name_value", - ) in kw["metadata"] - - -def test_update_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_attribute( - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_attribute_flattened_error(): +def test_update_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_attribute( - apihub_service.UpdateAttributeRequest(), - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_attribute(request) -@pytest.mark.asyncio -async def test_update_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_update_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_attribute( + # get arguments that satisfy an http rule for this method + sample_request = { + "attribute": { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( attribute=common_fields.Attribute(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_attribute_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{attribute.name=projects/*/locations/*/attributes/*}" + % client.transport._host, + args[1], + ) + + +def test_update_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_attribute( + client.update_attribute( apihub_service.UpdateAttributeRequest(), attribute=common_fields.Attribute(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12508,86 +10714,41 @@ async def test_update_attribute_flattened_error_async(): dict, ], ) -def test_delete_attribute(request_type, transport: str = "grpc"): +def test_delete_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_attribute(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_attribute(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest() - - -def test_delete_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteAttributeRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest( - name="name_value", - ) - - -def test_delete_attribute_use_cached_wrapped_rpc(): +def test_delete_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12605,6 +10766,7 @@ def test_delete_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_attribute ] = mock_rpc + request = {} client.delete_attribute(request) @@ -12618,240 +10780,219 @@ def test_delete_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest() - - -@pytest.mark.asyncio -async def test_delete_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_attribute_rest_required_fields( + request_type=apihub_service.DeleteAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_attribute - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_attribute - ] = mock_rpc - - request = {} - await client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteAttributeRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_attribute(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteAttributeRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_attribute_async_from_dict(): - await test_delete_attribute_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_attribute_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value = None - client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_attribute_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. 
+ return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteAttributeRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - request.name = "name_value" + response = client.delete_attribute(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_attribute(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] +def test_delete_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_attribute_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_attribute" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteAttributeRequest.pb( + apihub_service.DeleteAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.delete_attribute( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_attribute_flattened_error(): +def test_delete_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_attribute( - apihub_service.DeleteAttributeRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_attribute(request) -@pytest.mark.asyncio -async def test_delete_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_attribute( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_attribute_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_attribute( + client.delete_attribute( apihub_service.DeleteAttributeRequest(), name="name_value", ) +def test_delete_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12859,93 +11000,46 @@ async def test_delete_attribute_flattened_error_async(): dict, ], ) -def test_list_attributes(request_type, transport: str = "grpc"): +def test_list_attributes_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse( next_page_token="next_page_token_value", ) - response = client.list_attributes(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListAttributesRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attributes(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAttributesPager) assert response.next_page_token == "next_page_token_value" -def test_list_attributes_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_attributes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest() - - -def test_list_attributes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListAttributesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_attributes(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_attributes_use_cached_wrapped_rpc(): +def test_list_attributes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12961,6 +11055,7 @@ def test_list_attributes_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_attributes] = mock_rpc + request = {} client.list_attributes(request) @@ -12974,263 +11069,253 @@ def test_list_attributes_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_attributes_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_attributes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest() - - -@pytest.mark.asyncio -async def test_list_attributes_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_attributes_rest_required_fields( + request_type=apihub_service.ListAttributesRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_attributes - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_attributes - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_attributes(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attributes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_attributes(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attributes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_attributes_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListAttributesRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_attributes(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListAttributesRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAttributesAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attributes(request) -@pytest.mark.asyncio -async def test_list_attributes_async_from_dict(): - await test_list_attributes_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_attributes_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_attributes_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListAttributesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value = apihub_service.ListAttributesResponse() - client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_attributes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_attributes_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_attributes_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_attributes" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_attributes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListAttributesRequest.pb( + apihub_service.ListAttributesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListAttributesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListAttributesResponse.to_json( apihub_service.ListAttributesResponse() ) - await client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -def test_list_attributes_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListAttributesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListAttributesResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_attributes( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_attributes_flattened_error(): +def test_list_attributes_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListAttributesRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_attributes( - apihub_service.ListAttributesRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_attributes(request) -@pytest.mark.asyncio -async def test_list_attributes_flattened_async(): - client = ApiHubAsyncClient( +def test_list_attributes_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_attributes( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_attributes(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_attributes_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_attributes_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_attributes( + client.list_attributes( apihub_service.ListAttributesRequest(), parent="parent_value", ) -def test_list_attributes_pager(transport_name: str = "grpc"): +def test_list_attributes_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListAttributesResponse( attributes=[ common_fields.Attribute(), @@ -13255,262 +11340,80 @@ def test_list_attributes_pager(transport_name: str = "grpc"): common_fields.Attribute(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListAttributesResponse.to_json(x) for x in response ) - pager = client.list_attributes(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_attributes(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Attribute) for i in results) + pages = list(client.list_attributes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + -def test_list_attributes_pages(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.SearchResourcesRequest, + dict, + ], +) +def test_search_resources_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse( + next_page_token="next_page_token_value", ) - pages = list(client.list_attributes(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_list_attributes_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_attributes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_attributes( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Attribute) for i in responses) - - -@pytest.mark.asyncio -async def test_list_attributes_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_attributes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_attributes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.SearchResourcesRequest, - dict, - ], -) -def test_search_resources(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.search_resources(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.SearchResourcesRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchResourcesPager) assert response.next_page_token == "next_page_token_value" -def test_search_resources_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.search_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest() - - -def test_search_resources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.SearchResourcesRequest( - location="location_value", - query="query_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.search_resources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest( - location="location_value", - query="query_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_search_resources_use_cached_wrapped_rpc(): +def test_search_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -13528,6 +11431,7 @@ def test_search_resources_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.search_resources ] = mock_rpc + request = {} client.search_resources(request) @@ -13541,273 +11445,252 @@ def test_search_resources_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_search_resources_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.search_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest() - - -@pytest.mark.asyncio -async def test_search_resources_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_search_resources_rest_required_fields( + request_type=apihub_service.SearchResourcesRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.search_resources - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.search_resources - ] = mock_rpc - - request = {} - await client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.search_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_search_resources_async( - transport: str = "grpc_asyncio", request_type=apihub_service.SearchResourcesRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["location"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.search_resources(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.SearchResourcesRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchResourcesAsyncPager) - assert response.next_page_token == "next_page_token_value" + # verify required fields with default values are now present + jsonified_request["location"] = "location_value" + jsonified_request["query"] = "query_value" -@pytest.mark.asyncio -async def test_search_resources_async_from_dict(): - await test_search_resources_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" -def test_search_resources_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.SearchResourcesRequest() - - request.location = "location_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value = apihub_service.SearchResourcesResponse() - client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "location=location_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_search_resources_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.SearchResourcesRequest() + response_value = Response() + response_value.status_code = 200 - request.location = "location_value" + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse() - ) - await client.search_resources(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.search_resources(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "location=location_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_search_resources_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_search_resources_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.SearchResourcesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.search_resources( - location="location_value", - query="query_value", + unset_fields = transport.search_resources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "location", + "query", + ) ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].query - mock_val = "query_value" - assert arg == mock_val + ) -def test_search_resources_flattened_error(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_resources_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_search_resources" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_search_resources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.SearchResourcesRequest.pb( + apihub_service.SearchResourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.SearchResourcesResponse.to_json( + apihub_service.SearchResourcesResponse() + ) + + request = apihub_service.SearchResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.SearchResourcesResponse() + client.search_resources( - apihub_service.SearchResourcesRequest(), - location="location_value", - query="query_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() + -@pytest.mark.asyncio -async def test_search_resources_flattened_async(): - client = ApiHubAsyncClient( +def test_search_resources_rest_bad_request( + transport: str = "rest", request_type=apihub_service.SearchResourcesRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.SearchResourcesResponse() + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_resources( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_resources(request) + + +def test_search_resources_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( location="location_value", query="query_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.search_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].query - mock_val = "query_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_search_resources_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{location=projects/*/locations/*}:searchResources" + % client.transport._host, + args[1], + ) + + +def test_search_resources_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.search_resources( + client.search_resources( apihub_service.SearchResourcesRequest(), location="location_value", query="query_value", ) -def test_search_resources_pager(transport_name: str = "grpc"): +def test_search_resources_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.SearchResourcesResponse( search_results=[ apihub_service.SearchResult(), @@ -13832,162 +11715,29 @@ def test_search_resources_pager(transport_name: str = "grpc"): apihub_service.SearchResult(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("location", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.SearchResourcesResponse.to_json(x) for x in response ) - pager = client.search_resources(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"location": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.search_resources(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, apihub_service.SearchResult) for i in results) - -def test_search_resources_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_resources(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_search_resources_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_resources), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_resources( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, apihub_service.SearchResult) for i in responses) - - -@pytest.mark.asyncio -async def test_search_resources_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_resources), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_resources(request={}) - ).pages: - pages.append(page_) + pages = list(client.search_resources(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -13999,35 +11749,115 @@ async def test_search_resources_async_pages(): dict, ], ) -def test_create_external_api(request_type, transport: str = "grpc"): +def test_create_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["external_api"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( + name="name_value", + display_name="display_name_value", + description="description_value", + endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.create_external_api(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14038,66 +11868,13 @@ def test_create_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_create_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest() - - -def test_create_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateExternalApiRequest( - parent="parent_value", - external_api_id="external_api_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest( - parent="parent_value", - external_api_id="external_api_id_value", - ) - - -def test_create_external_api_use_cached_wrapped_rpc(): +def test_create_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14117,6 +11894,7 @@ def test_create_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_external_api ] = mock_rpc + request = {} client.create_external_api(request) @@ -14130,291 +11908,233 @@ def test_create_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.create_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest() - - -@pytest.mark.asyncio -async def test_create_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_external_api - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_external_api - ] = mock_rpc - - request = {} - await client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.create_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_external_api_async( - transport: str = "grpc_asyncio", +def test_create_external_api_rest_required_fields( request_type=apihub_service.CreateExternalApiRequest, ): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.create_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateExternalApiRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -@pytest.mark.asyncio -async def test_create_external_api_async_from_dict(): - await test_create_external_api_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("external_api_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_create_external_api_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateExternalApiRequest() - - request.parent = "parent_value" + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value = common_fields.ExternalApi() - client.create_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_api(request) -@pytest.mark.asyncio -async def test_create_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateExternalApiRequest() - request.parent = "parent_value" +def test_create_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() + unset_fields = transport.create_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("externalApiId",)) + & set( + ( + "parent", + "externalApi", + ) ) - await client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + ) -def test_create_external_api_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_external_api( - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateExternalApiRequest.pb( + apihub_service.CreateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].external_api_id - mock_val = "external_api_id_value" - assert arg == mock_val - - -def test_create_external_api_flattened_error(): + request = apihub_service.CreateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() + + client.create_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_external_api( - apihub_service.CreateExternalApiRequest(), - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_external_api(request) -@pytest.mark.asyncio -async def test_create_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_create_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_external_api( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", external_api=common_fields.ExternalApi(name="name_value"), external_api_id="external_api_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].external_api_id - mock_val = "external_api_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_external_api_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) + + +def test_create_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_external_api( + client.create_external_api( apihub_service.CreateExternalApiRequest(), parent="parent_value", external_api=common_fields.ExternalApi(name="name_value"), @@ -14422,6 +12142,12 @@ async def test_create_external_api_flattened_error_async(): ) +def test_create_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -14429,33 +12155,37 @@ async def test_create_external_api_flattened_error_async(): dict, ], ) -def test_get_external_api(request_type, transport: str = "grpc"): +def test_get_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( name="name_value", display_name="display_name_value", description="description_value", endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.get_external_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_api(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.ExternalApi) @@ -14466,60 +12196,13 @@ def test_get_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_get_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest() - - -def test_get_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetExternalApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest( - name="name_value", - ) - - -def test_get_external_api_use_cached_wrapped_rpc(): +def test_get_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14537,6 +12220,7 @@ def test_get_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_external_api ] = mock_rpc + request = {} client.get_external_api(request) @@ -14550,265 +12234,233 @@ def test_get_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.get_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest() - - -@pytest.mark.asyncio -async def test_get_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_external_api_rest_required_fields( + request_type=apihub_service.GetExternalApiRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_external_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_external_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_external_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_external_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_external_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetExternalApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.get_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetExternalApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_api(request) -@pytest.mark.asyncio -async def test_get_external_api_async_from_dict(): - await test_get_external_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_external_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetExternalApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value = common_fields.ExternalApi() - client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_external_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetExternalApiRequest.pb( + apihub_service.GetExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetExternalApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( common_fields.ExternalApi() ) - await client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + request = apihub_service.GetExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() -def test_get_external_api_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_external_api( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_external_api_flattened_error(): +def test_get_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_external_api( - apihub_service.GetExternalApiRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_external_api(request) -@pytest.mark.asyncio -async def test_get_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_get_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_external_api( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_external_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_external_api( + client.get_external_api( apihub_service.GetExternalApiRequest(), name="name_value", ) +def test_get_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -14816,35 +12468,119 @@ async def test_get_external_api_flattened_error_async(): dict, ], ) -def test_update_external_api(request_type, transport: str = "grpc"): +def test_update_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request_init["external_api"] = { + "name": "projects/sample1/locations/sample2/externalApis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( name="name_value", display_name="display_name_value", description="description_value", endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.update_external_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14855,60 +12591,13 @@ def test_update_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_update_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -def test_update_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateExternalApiRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -def test_update_external_api_use_cached_wrapped_rpc(): +def test_update_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14928,6 +12617,7 @@ def test_update_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_external_api ] = mock_rpc + request = {} client.update_external_api(request) @@ -14941,225 +12631,229 @@ def test_update_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.update_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -@pytest.mark.asyncio -async def test_update_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_external_api_rest_required_fields( + request_type=apihub_service.UpdateExternalApiRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_external_api - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_external_api - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_external_api(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_external_api_async( - transport: str = "grpc_asyncio", - request_type=apihub_service.UpdateExternalApiRequest, -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.update_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateExternalApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_api(request) -@pytest.mark.asyncio -async def test_update_external_api_async_from_dict(): - await test_update_external_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_external_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateExternalApiRequest() + unset_fields = transport.update_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "externalApi", + "updateMask", + ) + ) + ) - request.external_api.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value = common_fields.ExternalApi() - client.update_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateExternalApiRequest.pb( + apihub_service.UpdateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "external_api.name=name_value", - ) in kw["metadata"] + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() + ) + request = apihub_service.UpdateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() -@pytest.mark.asyncio -async def test_update_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.update_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateExternalApiRequest() + pre.assert_called_once() + post.assert_called_once() - request.external_api.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - await client.update_external_api(request) +def test_update_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateExternalApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request = request_type(**request_init) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "external_api.name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_external_api(request) -def test_update_external_api_flattened(): +def test_update_external_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_external_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + + # get arguments that satisfy an http rule for this method + sample_request = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( external_api=common_fields.ExternalApi(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{external_api.name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -def test_update_external_api_flattened_error(): +def test_update_external_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -15172,56 +12866,11 @@ def test_update_external_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_external_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_external_api( - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_external_api_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_external_api( - apihub_service.UpdateExternalApiRequest(), - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - @pytest.mark.parametrize( "request_type", @@ -15230,92 +12879,41 @@ async def test_update_external_api_flattened_error_async(): dict, ], ) -def test_delete_external_api(request_type, transport: str = "grpc"): +def test_delete_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_external_api(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_external_api(request) # Establish that the response is the type that we expect. 
assert response is None -def test_delete_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest() - - -def test_delete_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteExternalApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest( - name="name_value", - ) - - -def test_delete_external_api_use_cached_wrapped_rpc(): +def test_delete_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -15335,6 +12933,7 @@ def test_delete_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_external_api ] = mock_rpc + request = {} client.delete_external_api(request) @@ -15348,253 +12947,220 @@ def test_delete_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest() - - -@pytest.mark.asyncio -async def test_delete_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_external_api - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_external_api - ] = mock_rpc - - request = {} - await client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_external_api_async( - transport: str = "grpc_asyncio", +def test_delete_external_api_rest_required_fields( request_type=apihub_service.DeleteExternalApiRequest, ): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteExternalApiRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_external_api_async_from_dict(): - await test_delete_external_api_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_external_api_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteExternalApiRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value = None - client.delete_external_api(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_external_api(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteExternalApiRequest() + unset_fields = transport.delete_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_external_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteExternalApiRequest.pb( + apihub_service.DeleteExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() -def test_delete_external_api_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.DeleteExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.delete_external_api( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_external_api_flattened_error(): +def test_delete_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_external_api( - apihub_service.DeleteExternalApiRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_external_api(request) -@pytest.mark.asyncio -async def test_delete_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_external_api( - name="name_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_external_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_external_api( + client.delete_external_api( apihub_service.DeleteExternalApiRequest(), name="name_value", ) +def test_delete_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -15602,97 +13168,46 @@ async def test_delete_external_api_flattened_error_async(): dict, ], ) -def test_list_external_apis(request_type, transport: str = "grpc"): +def test_list_external_apis_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListExternalApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse( next_page_token="next_page_token_value", ) - response = client.list_external_apis(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListExternalApisRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_external_apis(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExternalApisPager) assert response.next_page_token == "next_page_token_value" -def test_list_external_apis_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_external_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest() - - -def test_list_external_apis_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListExternalApisRequest( - parent="parent_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_external_apis(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest( - parent="parent_value", - page_token="page_token_value", - ) - - -def test_list_external_apis_use_cached_wrapped_rpc(): +def test_list_external_apis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -15712,6 +13227,7 @@ def test_list_external_apis_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_external_apis ] = mock_rpc + request = {} client.list_external_apis(request) @@ -15725,277 +13241,252 @@ def test_list_external_apis_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_external_apis_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_external_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest() - - -@pytest.mark.asyncio -async def test_list_external_apis_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_external_apis_rest_required_fields( + request_type=apihub_service.ListExternalApisRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_external_apis - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_external_apis - ] = mock_rpc - - request = {} - await client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_external_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_list_external_apis_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListExternalApisRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_external_apis(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListExternalApisRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExternalApisAsyncPager) - assert response.next_page_token == "next_page_token_value" + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -@pytest.mark.asyncio -async def test_list_external_apis_async_from_dict(): - await test_list_external_apis_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_list_external_apis_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListExternalApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value = apihub_service.ListExternalApisResponse() - client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_external_apis_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListExternalApisRequest() + response_value = Response() + response_value.status_code = 200 - request.parent = "parent_value" + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse() - ) - await client.list_external_apis(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.list_external_apis(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_external_apis_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_external_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListExternalApisResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_external_apis( - parent="parent_value", + unset_fields = transport.list_external_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_external_apis_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), + & set(("parent",)) ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_external_apis( - apihub_service.ListExternalApisRequest(), - parent="parent_value", - ) - -@pytest.mark.asyncio -async def test_list_external_apis_flattened_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_external_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListExternalApisResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_external_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_external_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListExternalApisRequest.pb( + apihub_service.ListExternalApisRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListExternalApisResponse.to_json( + apihub_service.ListExternalApisResponse() + ) + + request = apihub_service.ListExternalApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListExternalApisResponse() + + client.list_external_apis( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_external_apis( + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_external_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListExternalApisRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_external_apis(request) + + +def test_list_external_apis_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_external_apis(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_external_apis_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_external_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_external_apis( + client.list_external_apis( apihub_service.ListExternalApisRequest(), parent="parent_value", ) -def test_list_external_apis_pager(transport_name: str = "grpc"): +def test_list_external_apis_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListExternalApisResponse( external_apis=[ common_fields.ExternalApi(), @@ -16020,15040 +13511,1132 @@ def test_list_external_apis_pager(transport_name: str = "grpc"): common_fields.ExternalApi(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListExternalApisResponse.to_json(x) for x in response ) - pager = client.list_external_apis(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_external_apis(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.ExternalApi) for i in results) + pages = list(client.list_external_apis(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_external_apis_pages(transport_name: str = "grpc"): - client = ApiHubClient( + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, + with pytest.raises(ValueError): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - pages = list(client.list_external_apis(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_external_apis_async_pager(): - client = ApiHubAsyncClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_external_apis), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, + transport=transport, ) - async_pager = await client.list_external_apis( - request={}, + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, common_fields.ExternalApi) for i in responses) + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -@pytest.mark.asyncio -async def test_list_external_apis_async_pages(): - client = ApiHubAsyncClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_external_apis(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateApiRequest, - dict, - ], -) -def test_create_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["api"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "owner": {"display_name": "display_name_value", "email": "email_value"}, - "versions": ["versions_value1", "versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "target_user": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "team": {}, - "business_unit": {}, - "maturity_level": {}, - "attributes": {}, - "api_style": {}, - "selected_version": "selected_version_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateApiRequest.meta.fields["api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api"][field])): - del request_init["api"][field][i][subfield] - else: - del request_init["api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_create_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_api] = mock_rpc - - request = {} - client.create_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_api_rest_required_fields(request_type=apihub_service.CreateApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("api_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("apiId",)) - & set( - ( - "parent", - "api", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateApiRequest.pb( - apihub_service.CreateApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.CreateApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.create_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_api(request) - - -def test_create_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, - args[1], - ) - - -def test_create_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_api( - apihub_service.CreateApiRequest(), - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - - -def test_create_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetApiRequest, - dict, - ], -) -def test_get_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_get_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_api] = mock_rpc - - request = {} - client.get_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_rest_required_fields(request_type=apihub_service.GetApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.GetApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.get_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api(request) - - -def test_get_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_get_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api( - apihub_service.GetApiRequest(), - name="name_value", - ) - - -def test_get_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListApisRequest, - dict, - ], -) -def test_list_apis_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_apis(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApisPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_apis_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_apis in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_apis] = mock_rpc - - request = {} - client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_apis_rest_required_fields(request_type=apihub_service.ListApisRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_apis._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_apis._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_apis(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_apis_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_apis._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_apis_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_apis" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_apis" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListApisResponse.to_json( - apihub_service.ListApisResponse() - ) - - request = apihub_service.ListApisRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListApisResponse() - - client.list_apis( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_apis_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListApisRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_apis(request) - - -def test_list_apis_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_apis(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, - args[1], - ) - - -def test_list_apis_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_apis( - apihub_service.ListApisRequest(), - parent="parent_value", - ) - - -def test_list_apis_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(apihub_service.ListApisResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_apis(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Api) for i in results) - - pages = list(client.list_apis(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateApiRequest, - dict, - ], -) -def test_update_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} - request_init["api"] = { - "name": "projects/sample1/locations/sample2/apis/sample3", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "owner": {"display_name": "display_name_value", "email": "email_value"}, - "versions": ["versions_value1", "versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "target_user": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "team": {}, - "business_unit": {}, - "maturity_level": {}, - "attributes": {}, - "api_style": {}, - "selected_version": "selected_version_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateApiRequest.meta.fields["api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api"][field])): - del request_init["api"][field][i][subfield] - else: - del request_init["api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_update_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_api] = mock_rpc - - request = {} - client.update_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_api_rest_required_fields(request_type=apihub_service.UpdateApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "api", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateApiRequest.pb( - apihub_service.UpdateApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.UpdateApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.update_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_api(request) - - -def test_update_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = { - "api": {"name": "projects/sample1/locations/sample2/apis/sample3"} - } - - # get truthy value for each flattened field - mock_args = dict( - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{api.name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_update_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_api( - apihub_service.UpdateApiRequest(), - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteApiRequest, - dict, - ], -) -def test_delete_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_api(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_api] = mock_rpc - - request = {} - client.delete_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_api_rest_required_fields(request_type=apihub_service.DeleteApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_api" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteApiRequest.pb( - apihub_service.DeleteApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_api(request) - - -def test_delete_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_delete_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_api( - apihub_service.DeleteApiRequest(), - name="name_value", - ) - - -def test_delete_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateVersionRequest, - dict, - ], -) -def test_create_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request_init["version"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "specs": ["specs_value1", "specs_value2"], - "api_operations": ["api_operations_value1", "api_operations_value2"], - "definitions": ["definitions_value1", "definitions_value2"], - "deployments": ["deployments_value1", "deployments_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lifecycle": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "compliance": {}, - "accreditation": {}, - "attributes": {}, - "selected_deployment": "selected_deployment_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateVersionRequest.meta.fields["version"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["version"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["version"][field])): - del request_init["version"][field][i][subfield] - else: - del request_init["version"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_create_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_version] = mock_rpc - - request = {} - client.create_version(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_version_rest_required_fields( - request_type=apihub_service.CreateVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("version_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_version._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("versionId",)) - & set( - ( - "parent", - "version", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_version" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateVersionRequest.pb( - apihub_service.CreateVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.CreateVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.create_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_version(request) - - -def test_create_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" - % client.transport._host, - args[1], - ) - - -def test_create_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_version( - apihub_service.CreateVersionRequest(), - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) - - -def test_create_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetVersionRequest, - dict, - ], -) -def test_get_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_version(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_get_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_version] = mock_rpc - - request = {} - client.get_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_version_rest_required_fields( - request_type=apihub_service.GetVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_version" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetVersionRequest.pb( - apihub_service.GetVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.GetVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.get_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_version(request) - - -def test_get_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_version( - apihub_service.GetVersionRequest(), - name="name_value", - ) - - -def test_get_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListVersionsRequest, - dict, - ], -) -def test_list_versions_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_versions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVersionsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_versions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_versions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc - - request = {} - client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_versions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_versions_rest_required_fields( - request_type=apihub_service.ListVersionsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_versions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_versions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_versions(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_versions_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_versions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_versions_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_versions" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_versions" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListVersionsRequest.pb( - apihub_service.ListVersionsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListVersionsResponse.to_json( - apihub_service.ListVersionsResponse() - ) - - request = apihub_service.ListVersionsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListVersionsResponse() - - client.list_versions( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_versions_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListVersionsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_versions(request) - - -def test_list_versions_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_versions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" - % client.transport._host, - args[1], - ) - - -def test_list_versions_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_versions( - apihub_service.ListVersionsRequest(), - parent="parent_value", - ) - - -def test_list_versions_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListVersionsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - pager = client.list_versions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Version) for i in results) - - pages = list(client.list_versions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateVersionRequest, - dict, - ], -) -def test_update_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - request_init["version"] = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "specs": ["specs_value1", "specs_value2"], - "api_operations": ["api_operations_value1", "api_operations_value2"], - "definitions": ["definitions_value1", "definitions_value2"], - "deployments": ["deployments_value1", "deployments_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lifecycle": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "compliance": {}, - "accreditation": {}, - "attributes": {}, - "selected_deployment": "selected_deployment_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateVersionRequest.meta.fields["version"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["version"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["version"][field])): - del request_init["version"][field][i][subfield] - else: - del request_init["version"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_update_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_version] = mock_rpc - - request = {} - client.update_version(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_version_rest_required_fields( - request_type=apihub_service.UpdateVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_version._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "version", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_version" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateVersionRequest.pb( - apihub_service.UpdateVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.UpdateVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.update_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_version(request) - - -def test_update_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - - # get truthy value for each flattened field - mock_args = dict( - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{version.name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_update_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_version( - apihub_service.UpdateVersionRequest(), - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteVersionRequest, - dict, - ], -) -def test_delete_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_version(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc - - request = {} - client.delete_version(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_version_rest_required_fields( - request_type=apihub_service.DeleteVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_version" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteVersionRequest.pb( - apihub_service.DeleteVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_version(request) - - -def test_delete_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_version( - apihub_service.DeleteVersionRequest(), - name="name_value", - ) - - -def test_delete_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateSpecRequest, - dict, - ], -) -def test_create_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request_init["spec"] = { - "name": "name_value", - "display_name": "display_name_value", - "spec_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - "details": { - "open_api_spec_details": { - "format_": 1, - "version": "version_value", - "owner": {"display_name": "display_name_value", "email": "email_value"}, - }, - "description": "description_value", - }, - "source_uri": "source_uri_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lint_response": { - "issues": [ - { - "code": "code_value", - "path": ["path_value1", "path_value2"], - "message": "message_value", - "severity": 1, - "range_": {"start": {"line": 424, "character": 941}, "end": {}}, - } - ], - "summary": [{"severity": 1, "count": 553}], - "state": 1, - "source": "source_value", - "linter": 1, - "create_time": {}, - }, - "attributes": {}, - "documentation": {"external_uri": "external_uri_value"}, - "parsing_mode": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateSpecRequest.meta.fields["spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["spec"][field])): - del request_init["spec"][field][i][subfield] - else: - del request_init["spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_create_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_spec] = mock_rpc - - request = {} - client.create_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_spec_rest_required_fields( - request_type=apihub_service.CreateSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("spec_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_spec._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("specId",)) - & set( - ( - "parent", - "spec", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateSpecRequest.pb( - apihub_service.CreateSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.CreateSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.create_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_spec(request) - - -def test_create_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" - % client.transport._host, - args[1], - ) - - -def test_create_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_spec( - apihub_service.CreateSpecRequest(), - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - - -def test_create_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetSpecRequest, - dict, - ], -) -def test_get_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_spec(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_get_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc - - request = {} - client.get_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_spec_rest_required_fields(request_type=apihub_service.GetSpecRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.GetSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.get_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_spec(request) - - -def test_get_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_get_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_spec( - apihub_service.GetSpecRequest(), - name="name_value", - ) - - -def test_get_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetSpecContentsRequest, - dict, - ], -) -def test_get_spec_contents_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_spec_contents(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.SpecContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" - - -def test_get_spec_contents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_spec_contents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_spec_contents - ] = mock_rpc - - request = {} - client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_spec_contents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_spec_contents_rest_required_fields( - request_type=apihub_service.GetSpecContentsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.SpecContents() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_spec_contents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_spec_contents_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_spec_contents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_spec_contents_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_spec_contents" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_spec_contents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetSpecContentsRequest.pb( - apihub_service.GetSpecContentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.SpecContents.to_json( - common_fields.SpecContents() - ) - - request = apihub_service.GetSpecContentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.SpecContents() - - client.get_spec_contents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_spec_contents_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetSpecContentsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_spec_contents(request) - - -def test_get_spec_contents_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.SpecContents() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_spec_contents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents" - % client.transport._host, - args[1], - ) - - -def test_get_spec_contents_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_spec_contents( - apihub_service.GetSpecContentsRequest(), - name="name_value", - ) - - -def test_get_spec_contents_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListSpecsRequest, - dict, - ], -) -def test_list_specs_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_specs(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSpecsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_specs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_specs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_specs] = mock_rpc - - request = {} - client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_specs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_specs_rest_required_fields(request_type=apihub_service.ListSpecsRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_specs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_specs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_specs(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_specs_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_specs._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_specs_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_specs" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_specs" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListSpecsRequest.pb( - apihub_service.ListSpecsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListSpecsResponse.to_json( - apihub_service.ListSpecsResponse() - ) - - request = apihub_service.ListSpecsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListSpecsResponse() - - client.list_specs( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_specs_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListSpecsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_specs(request) - - -def test_list_specs_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_specs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" - % client.transport._host, - args[1], - ) - - -def test_list_specs_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_specs( - apihub_service.ListSpecsRequest(), - parent="parent_value", - ) - - -def test_list_specs_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(apihub_service.ListSpecsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - pager = client.list_specs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Spec) for i in results) - - pages = list(client.list_specs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateSpecRequest, - dict, - ], -) -def test_update_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - request_init["spec"] = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5", - "display_name": "display_name_value", - "spec_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - "details": { - "open_api_spec_details": { - "format_": 1, - "version": "version_value", - "owner": {"display_name": "display_name_value", "email": "email_value"}, - }, - "description": "description_value", - }, - "source_uri": "source_uri_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lint_response": { - "issues": [ - { - "code": "code_value", - "path": ["path_value1", "path_value2"], - "message": "message_value", - "severity": 1, - "range_": {"start": {"line": 424, "character": 941}, "end": {}}, - } - ], - "summary": [{"severity": 1, "count": 553}], - "state": 1, - "source": "source_value", - "linter": 1, - "create_time": {}, - }, - "attributes": {}, - "documentation": {"external_uri": "external_uri_value"}, - "parsing_mode": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateSpecRequest.meta.fields["spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["spec"][field])): - del request_init["spec"][field][i][subfield] - else: - del request_init["spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_update_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_spec] = mock_rpc - - request = {} - client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_spec_rest_required_fields( - request_type=apihub_service.UpdateSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_spec._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "spec", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateSpecRequest.pb( - apihub_service.UpdateSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.UpdateSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.update_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_spec(request) - - -def test_update_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - - # get truthy value for each flattened field - mock_args = dict( - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_update_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_spec( - apihub_service.UpdateSpecRequest(), - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteSpecRequest, - dict, - ], -) -def test_delete_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_spec(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_spec] = mock_rpc - - request = {} - client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_spec_rest_required_fields( - request_type=apihub_service.DeleteSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_spec" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteSpecRequest.pb( - apihub_service.DeleteSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_spec(request) - - -def test_delete_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_spec( - apihub_service.DeleteSpecRequest(), - name="name_value", - ) - - -def test_delete_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetApiOperationRequest, - dict, - ], -) -def test_get_api_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiOperation) - assert response.name == "name_value" - assert response.spec == "spec_value" - - -def test_get_api_operation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_api_operation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_api_operation - ] = mock_rpc - - request = {} - client.get_api_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_api_operation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_operation_rest_required_fields( - request_type=apihub_service.GetApiOperationRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiOperation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api_operation(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_operation_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api_operation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_operation_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_api_operation" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_api_operation" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetApiOperationRequest.pb( - apihub_service.GetApiOperationRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ApiOperation.to_json( - common_fields.ApiOperation() - ) - - request = apihub_service.GetApiOperationRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ApiOperation() - - client.get_api_operation( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_operation_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetApiOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api_operation(request) - - -def test_get_api_operation_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiOperation() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api_operation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}" - % client.transport._host, - args[1], - ) - - -def test_get_api_operation_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api_operation( - apihub_service.GetApiOperationRequest(), - name="name_value", - ) - - -def test_get_api_operation_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListApiOperationsRequest, - dict, - ], -) -def test_list_api_operations_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_api_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListApiOperationsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_api_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_api_operations in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_api_operations - ] = mock_rpc - - request = {} - client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_api_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_api_operations_rest_required_fields( - request_type=apihub_service.ListApiOperationsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_api_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_api_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = apihub_service.ListApiOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_api_operations(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_api_operations_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_api_operations._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_api_operations_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_api_operations" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_api_operations" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListApiOperationsRequest.pb( - apihub_service.ListApiOperationsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListApiOperationsResponse.to_json( - apihub_service.ListApiOperationsResponse() - ) - - request = apihub_service.ListApiOperationsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListApiOperationsResponse() - - client.list_api_operations( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_api_operations_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListApiOperationsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - 
request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_api_operations(request) - - -def test_list_api_operations_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApiOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_api_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations" - % client.transport._host, - args[1], - ) - - -def test_list_api_operations_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_api_operations( - apihub_service.ListApiOperationsRequest(), - parent="parent_value", - ) - - -def test_list_api_operations_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListApiOperationsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - pager = client.list_api_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.ApiOperation) for i in results) - - pages = list(client.list_api_operations(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDefinitionRequest, - dict, - ], -) -def test_get_definition_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_definition(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Definition) - assert response.name == "name_value" - assert response.spec == "spec_value" - assert response.type_ == common_fields.Definition.Type.SCHEMA - - -def test_get_definition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_definition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_definition] = mock_rpc - - request = {} - client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_definition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_definition_rest_required_fields( - request_type=apihub_service.GetDefinitionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_definition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_definition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_definition(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_definition_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_definition._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_definition_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_definition" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_definition" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetDefinitionRequest.pb( - apihub_service.GetDefinitionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Definition.to_json( - common_fields.Definition() - ) - - request = apihub_service.GetDefinitionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Definition() - - client.get_definition( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_definition_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDefinitionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_definition(request) - - -def test_get_definition_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_definition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_definition_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_definition( - apihub_service.GetDefinitionRequest(), - name="name_value", - ) - - -def test_get_definition_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateDeploymentRequest, - dict, - ], -) -def test_create_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["deployment"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "deployment_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "resource_uri": "resource_uri_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "api_versions": ["api_versions_value1", "api_versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "slo": {}, - "environment": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_deployment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_create_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_deployment - ] = mock_rpc - - request = {} - client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_deployment_rest_required_fields( - request_type=apihub_service.CreateDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("deployment_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("deploymentId",)) - & set( - ( - "parent", - "deployment", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_deployment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateDeploymentRequest.pb( - apihub_service.CreateDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.CreateDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.create_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_deployment(request) - - -def test_create_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) - - -def test_create_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deployment( - apihub_service.CreateDeploymentRequest(), - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) - - -def test_create_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDeploymentRequest, - dict, - ], -) -def test_get_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_deployment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_get_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc - - request = {} - client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_deployment_rest_required_fields( - request_type=apihub_service.GetDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_deployment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetDeploymentRequest.pb( - apihub_service.GetDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.GetDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.get_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_deployment(request) - - -def test_get_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_get_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deployment( - apihub_service.GetDeploymentRequest(), - name="name_value", - ) - - -def test_get_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDeploymentsRequest, - dict, - ], -) -def test_list_deployments_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_deployments(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeploymentsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_deployments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_deployments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_deployments - ] = mock_rpc - - request = {} - client.list_deployments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_deployments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_deployments_rest_required_fields( - request_type=apihub_service.ListDeploymentsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_deployments(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_deployments_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_deployments._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deployments_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_deployments" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_deployments" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListDeploymentsRequest.pb( - apihub_service.ListDeploymentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListDeploymentsResponse.to_json( - apihub_service.ListDeploymentsResponse() - ) - - request = apihub_service.ListDeploymentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListDeploymentsResponse() - - client.list_deployments( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_deployments_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListDeploymentsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_deployments(request) - - -def test_list_deployments_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_deployments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) - - -def test_list_deployments_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deployments( - apihub_service.ListDeploymentsRequest(), - parent="parent_value", - ) - - -def test_list_deployments_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListDeploymentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_deployments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Deployment) for i in results) - - pages = list(client.list_deployments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateDeploymentRequest, - dict, - ], -) -def test_update_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request_init["deployment"] = { - "name": "projects/sample1/locations/sample2/deployments/sample3", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "deployment_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "resource_uri": "resource_uri_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "api_versions": ["api_versions_value1", "api_versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "slo": {}, - "environment": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_deployment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_update_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_deployment - ] = mock_rpc - - request = {} - client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_deployment_rest_required_fields( - request_type=apihub_service.UpdateDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "deployment", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_deployment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateDeploymentRequest.pb( - apihub_service.UpdateDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.UpdateDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.update_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_deployment(request) - - -def test_update_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "deployment": { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_update_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deployment( - apihub_service.UpdateDeploymentRequest(), - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteDeploymentRequest, - dict, - ], -) -def test_delete_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_deployment(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_deployment - ] = mock_rpc - - request = {} - client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_deployment_rest_required_fields( - request_type=apihub_service.DeleteDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_deployment" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteDeploymentRequest.pb( - apihub_service.DeleteDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_deployment(request) - - -def test_delete_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deployment( - apihub_service.DeleteDeploymentRequest(), - name="name_value", - ) - - -def test_delete_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateAttributeRequest, - dict, - ], -) -def test_create_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["attribute"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "definition_type": 1, - "scope": 1, - "data_type": 1, - "allowed_values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ], - "cardinality": 1172, - "mandatory": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateAttributeRequest.meta.fields["attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["attribute"][field])): - del request_init["attribute"][field][i][subfield] - else: - del request_init["attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_attribute(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_create_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_attribute - ] = mock_rpc - - request = {} - client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_attribute_rest_required_fields( - request_type=apihub_service.CreateAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_attribute._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("attribute_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_attribute._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("attributeId",)) - & set( - ( - "parent", - "attribute", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_attribute" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_attribute" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateAttributeRequest.pb( - apihub_service.CreateAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.CreateAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.create_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_attribute(request) - - -def test_create_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, - args[1], - ) - - -def test_create_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_attribute( - apihub_service.CreateAttributeRequest(), - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) - - -def test_create_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetAttributeRequest, - dict, - ], -) -def test_get_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_attribute(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_get_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_attribute] = mock_rpc - - request = {} - client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_attribute_rest_required_fields( - request_type=apihub_service.GetAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_attribute._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_attribute" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_attribute" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetAttributeRequest.pb( - apihub_service.GetAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.GetAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.get_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_attribute(request) - - -def test_get_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, - args[1], - ) - - -def test_get_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_attribute( - apihub_service.GetAttributeRequest(), - name="name_value", - ) - - -def test_get_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateAttributeRequest, - dict, - ], -) -def test_update_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} - } - request_init["attribute"] = { - "name": "projects/sample1/locations/sample2/attributes/sample3", - "display_name": "display_name_value", - "description": "description_value", - "definition_type": 1, - "scope": 1, - "data_type": 1, - "allowed_values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ], - "cardinality": 1172, - "mandatory": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateAttributeRequest.meta.fields["attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["attribute"][field])): - del request_init["attribute"][field][i][subfield] - else: - del request_init["attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_attribute(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_update_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_attribute - ] = mock_rpc - - request = {} - client.update_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_attribute_rest_required_fields( - request_type=apihub_service.UpdateAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_attribute._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_attribute._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "attribute", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_attribute" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_attribute" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateAttributeRequest.pb( - apihub_service.UpdateAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.UpdateAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.update_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - 
post.assert_called_once() - - -def test_update_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_attribute(request) - - -def test_update_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = { - "attribute": { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{attribute.name=projects/*/locations/*/attributes/*}" - % client.transport._host, - args[1], - ) - - -def test_update_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_attribute( - apihub_service.UpdateAttributeRequest(), - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteAttributeRequest, - dict, - ], -) -def test_delete_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_attribute(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_attribute - ] = mock_rpc - - request = {} - client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_attribute_rest_required_fields( - request_type=apihub_service.DeleteAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_attribute._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_attribute" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteAttributeRequest.pb( - apihub_service.DeleteAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_attribute(request) - - -def test_delete_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, - args[1], - ) - - -def test_delete_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_attribute( - apihub_service.DeleteAttributeRequest(), - name="name_value", - ) - - -def test_delete_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListAttributesRequest, - dict, - ], -) -def test_list_attributes_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_attributes(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAttributesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_attributes_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_attributes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.list_attributes] = mock_rpc - - request = {} - client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_attributes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_attributes_rest_required_fields( - request_type=apihub_service.ListAttributesRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_attributes._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_attributes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_attributes(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_attributes_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_attributes._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_attributes_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_attributes" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_attributes" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListAttributesRequest.pb( - apihub_service.ListAttributesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListAttributesResponse.to_json( - apihub_service.ListAttributesResponse() - ) - - request = apihub_service.ListAttributesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListAttributesResponse() - - client.list_attributes( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_attributes_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListAttributesRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_attributes(request) - - -def test_list_attributes_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_attributes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, - args[1], - ) - - -def test_list_attributes_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_attributes( - apihub_service.ListAttributesRequest(), - parent="parent_value", - ) - - -def test_list_attributes_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListAttributesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_attributes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Attribute) for i in results) - - pages = list(client.list_attributes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.SearchResourcesRequest, - dict, - ], -) -def test_search_resources_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.search_resources(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchResourcesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_search_resources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.search_resources - ] = mock_rpc - - request = {} - client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_resources_rest_required_fields( - request_type=apihub_service.SearchResourcesRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["location"] = "" - request_init["query"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).search_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["location"] = "location_value" - jsonified_request["query"] = "query_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).search_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == "location_value" - assert "query" in jsonified_request - assert jsonified_request["query"] == "query_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.search_resources(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_search_resources_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.search_resources._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "location", - "query", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_resources_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_search_resources" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_search_resources" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.SearchResourcesRequest.pb( - apihub_service.SearchResourcesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.SearchResourcesResponse.to_json( - apihub_service.SearchResourcesResponse() - ) - - request = apihub_service.SearchResourcesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.SearchResourcesResponse() - - client.search_resources( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_search_resources_rest_bad_request( - transport: str = "rest", request_type=apihub_service.SearchResourcesRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.search_resources(request) - - -def test_search_resources_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"location": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - location="location_value", - query="query_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.search_resources(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{location=projects/*/locations/*}:searchResources" - % client.transport._host, - args[1], - ) - - -def test_search_resources_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_resources( - apihub_service.SearchResourcesRequest(), - location="location_value", - query="query_value", - ) - - -def test_search_resources_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.SearchResourcesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"location": "projects/sample1/locations/sample2"} - - pager = client.search_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, apihub_service.SearchResult) for i in results) - - pages = list(client.search_resources(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateExternalApiRequest, - dict, - ], -) -def test_create_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["external_api"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "paths": ["paths_value1", "paths_value2"], - "documentation": {"external_uri": "external_uri_value"}, - "attributes": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateExternalApiRequest.meta.fields["external_api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["external_api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["external_api"][field])): - del request_init["external_api"][field][i][subfield] - else: - del request_init["external_api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_external_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_create_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_external_api - ] = mock_rpc - - request = {} - client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_external_api_rest_required_fields( - request_type=apihub_service.CreateExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_external_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("external_api_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_external_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("externalApiId",)) - & set( - ( - "parent", - "externalApi", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateExternalApiRequest.pb( - apihub_service.CreateExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.CreateExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.create_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_external_api(request) - - -def test_create_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/externalApis" - % client.transport._host, - args[1], - ) - - -def test_create_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_external_api( - apihub_service.CreateExternalApiRequest(), - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) - - -def test_create_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetExternalApiRequest, - dict, - ], -) -def test_get_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_external_api(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_get_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_external_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_external_api - ] = mock_rpc - - request = {} - client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_external_api_rest_required_fields( - request_type=apihub_service.GetExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_external_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetExternalApiRequest.pb( - apihub_service.GetExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.GetExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.get_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_external_api(request) - - -def test_get_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_get_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_external_api( - apihub_service.GetExternalApiRequest(), - name="name_value", - ) - - -def test_get_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateExternalApiRequest, - dict, - ], -) -def test_update_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - request_init["external_api"] = { - "name": "projects/sample1/locations/sample2/externalApis/sample3", - "display_name": "display_name_value", - "description": "description_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "paths": ["paths_value1", "paths_value2"], - "documentation": {"external_uri": "external_uri_value"}, - "attributes": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateExternalApiRequest.meta.fields["external_api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["external_api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["external_api"][field])): - del request_init["external_api"][field][i][subfield] - else: - del request_init["external_api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_external_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_update_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_external_api - ] = mock_rpc - - request = {} - client.update_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_external_api_rest_required_fields( - request_type=apihub_service.UpdateExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_external_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_external_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "externalApi", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateExternalApiRequest.pb( - apihub_service.UpdateExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.UpdateExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.update_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_external_api(request) - - -def test_update_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{external_api.name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_update_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_external_api( - apihub_service.UpdateExternalApiRequest(), - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteExternalApiRequest, - dict, - ], -) -def test_delete_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_external_api(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_external_api - ] = mock_rpc - - request = {} - client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_external_api_rest_required_fields( - request_type=apihub_service.DeleteExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_external_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_external_api" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteExternalApiRequest.pb( - apihub_service.DeleteExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_external_api(request) - - -def test_delete_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_external_api( - apihub_service.DeleteExternalApiRequest(), - name="name_value", - ) - - -def test_delete_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListExternalApisRequest, - dict, - ], -) -def test_list_external_apis_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_external_apis(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExternalApisPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_external_apis_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_external_apis in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_external_apis - ] = mock_rpc - - request = {} - client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_external_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_external_apis_rest_required_fields( - request_type=apihub_service.ListExternalApisRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_external_apis._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_external_apis._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_external_apis(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_external_apis_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_external_apis._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_external_apis_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_external_apis" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_external_apis" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListExternalApisRequest.pb( - apihub_service.ListExternalApisRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListExternalApisResponse.to_json( - apihub_service.ListExternalApisResponse() - ) - - request = apihub_service.ListExternalApisRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListExternalApisResponse() - - client.list_external_apis( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_external_apis_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListExternalApisRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_external_apis(request) - - -def test_list_external_apis_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_external_apis(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/externalApis" - % client.transport._host, - args[1], - ) - - -def test_list_external_apis_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_external_apis( - apihub_service.ListExternalApisRequest(), - parent="parent_value", - ) - - -def test_list_external_apis_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListExternalApisResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_external_apis(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.ExternalApi) for i in results) - - pages = list(client.list_external_apis(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - transports.ApiHubRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubGrpcTransport, - ) - - -def test_api_hub_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_api", - "get_api", - "list_apis", - "update_api", - "delete_api", - "create_version", - "get_version", - "list_versions", - "update_version", - "delete_version", - "create_spec", - "get_spec", - "get_spec_contents", - "list_specs", - "update_spec", - "delete_spec", - "get_api_operation", - "list_api_operations", - "get_definition", - "create_deployment", - "get_deployment", - "list_deployments", - "update_deployment", - "delete_deployment", - "create_attribute", - "get_attribute", - "update_attribute", - "delete_attribute", - "list_attributes", - "search_resources", - "create_external_api", - "get_external_api", - "update_external_api", - "delete_external_api", - "list_external_apis", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubTransport() - adc.assert_called_once() - - -def test_api_hub_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - ], -) -def test_api_hub_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials.
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - transports.ApiHubRestTransport, - ], -) -def test_api_hub_transport_auth_gdch_credentials(transport_class): - host = "https://language.com/" - api_audience_tests = [None, "https://language2.com/"] - api_audience_expect = [host, "https://language2.com/"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubGrpcTransport, grpc_helpers), - (transports.ApiHubGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used.
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_host_no_port(transport_name): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_host_with_port(transport_name): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_api_hub_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_api._session - session2 = client2.transport.create_api._session - assert session1 != session2 - session1 = client1.transport.get_api._session - session2 = client2.transport.get_api._session - assert session1 != session2 - session1 = client1.transport.list_apis._session - session2 = client2.transport.list_apis._session - assert session1 != session2 - session1 = client1.transport.update_api._session - session2 = client2.transport.update_api._session - assert session1 != session2 - session1 = client1.transport.delete_api._session - session2 = client2.transport.delete_api._session - assert session1 != session2 - session1 = client1.transport.create_version._session - session2 = client2.transport.create_version._session - assert session1 != session2 - session1 = client1.transport.get_version._session - session2 = client2.transport.get_version._session - assert session1 != session2 - session1 = client1.transport.list_versions._session - session2 = client2.transport.list_versions._session - assert session1 != session2 - session1 = client1.transport.update_version._session - session2 = client2.transport.update_version._session - assert session1 != session2 - session1 = 
client1.transport.delete_version._session - session2 = client2.transport.delete_version._session - assert session1 != session2 - session1 = client1.transport.create_spec._session - session2 = client2.transport.create_spec._session - assert session1 != session2 - session1 = client1.transport.get_spec._session - session2 = client2.transport.get_spec._session - assert session1 != session2 - session1 = client1.transport.get_spec_contents._session - session2 = client2.transport.get_spec_contents._session - assert session1 != session2 - session1 = client1.transport.list_specs._session - session2 = client2.transport.list_specs._session - assert session1 != session2 - session1 = client1.transport.update_spec._session - session2 = client2.transport.update_spec._session - assert session1 != session2 - session1 = client1.transport.delete_spec._session - session2 = client2.transport.delete_spec._session - assert session1 != session2 - session1 = client1.transport.get_api_operation._session - session2 = client2.transport.get_api_operation._session - assert session1 != session2 - session1 = client1.transport.list_api_operations._session - session2 = client2.transport.list_api_operations._session - assert session1 != session2 - session1 = client1.transport.get_definition._session - session2 = client2.transport.get_definition._session - assert session1 != session2 - session1 = client1.transport.create_deployment._session - session2 = client2.transport.create_deployment._session - assert session1 != session2 - session1 = client1.transport.get_deployment._session - session2 = client2.transport.get_deployment._session - assert session1 != session2 - session1 = client1.transport.list_deployments._session - session2 = client2.transport.list_deployments._session - assert session1 != session2 - session1 = client1.transport.update_deployment._session - session2 = client2.transport.update_deployment._session - assert session1 != session2 - session1 = client1.transport.delete_deployment._session - session2 = client2.transport.delete_deployment._session - assert session1 != session2 - session1 = client1.transport.create_attribute._session - session2 = client2.transport.create_attribute._session - assert session1 != session2 - session1 = client1.transport.get_attribute._session - session2 = client2.transport.get_attribute._session - assert session1 != session2 - session1 = client1.transport.update_attribute._session - session2 = client2.transport.update_attribute._session - assert session1 != session2 - session1 = client1.transport.delete_attribute._session - session2 = client2.transport.delete_attribute._session - assert session1 != session2 - session1 = client1.transport.list_attributes._session - session2 = client2.transport.list_attributes._session - assert session1 != session2 - session1 = client1.transport.search_resources._session - session2 = client2.transport.search_resources._session - assert session1 != session2 - session1 = client1.transport.create_external_api._session - session2 = client2.transport.create_external_api._session - assert session1 != session2 - session1 = client1.transport.get_external_api._session - session2 = client2.transport.get_external_api._session - assert session1 != session2 - session1 = client1.transport.update_external_api._session - session2 = client2.transport.update_external_api._session - assert session1 != session2 - session1 = client1.transport.delete_external_api._session - session2 = client2.transport.delete_external_api._session - assert session1 != session2 - session1 
= client1.transport.list_external_apis._session - session2 = client2.transport.list_external_apis._session - assert session1 != session2 - - -def test_api_hub_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_api_path(): - project = "squid" - location = "clam" - api = "whelk" - expected = "projects/{project}/locations/{location}/apis/{api}".format( - project=project, - location=location, - api=api, - ) - actual = ApiHubClient.api_path(project, location, api) - assert expected == actual - - -def test_parse_api_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api": "nudibranch", - } - path = ApiHubClient.api_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_api_path(path) - assert expected == actual - - -def test_api_operation_path(): - project = "cuttlefish" - location = "mussel" - api = "winkle" - version = "nautilus" - operation = "scallop" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( - project=project, - location=location, - api=api, - version=version, - operation=operation, - ) - actual = ApiHubClient.api_operation_path(project, location, api, version, operation) - assert expected == actual - - -def test_parse_api_operation_path(): - expected = { - "project": "abalone", - "location": "squid", - "api": "clam", - "version": "whelk", - "operation": "octopus", - } - path = ApiHubClient.api_operation_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_api_operation_path(path) - assert expected == actual - - -def test_attribute_path(): - project = "oyster" - location = "nudibranch" - attribute = "cuttlefish" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "mussel", - "location": "winkle", - "attribute": "nautilus", - } - path = ApiHubClient.attribute_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_attribute_path(path) - assert expected == actual - - -def test_definition_path(): - project = "scallop" - location = "abalone" - api = "squid" - version = "clam" - definition = "whelk" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( - project=project, - location=location, - api=api, - version=version, - definition=definition, - ) - actual = ApiHubClient.definition_path(project, location, api, version, definition) - assert expected == actual - - -def test_parse_definition_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api": "nudibranch", - "version": "cuttlefish", - "definition": "mussel", - } - path = ApiHubClient.definition_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_definition_path(path) - assert expected == actual - - -def test_deployment_path(): - project = "winkle" - location = "nautilus" - deployment = "scallop" - expected = ( - "projects/{project}/locations/{location}/deployments/{deployment}".format( - project=project, - location=location, - deployment=deployment, - ) - ) - actual = ApiHubClient.deployment_path(project, location, deployment) - assert expected == actual - - -def test_parse_deployment_path(): - expected = { - "project": "abalone", - "location": "squid", - "deployment": "clam", - } - path = ApiHubClient.deployment_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_deployment_path(path) - assert expected == actual - - -def test_external_api_path(): - project = "whelk" - location = "octopus" - external_api = "oyster" - expected = ( - "projects/{project}/locations/{location}/externalApis/{external_api}".format( - project=project, - location=location, - external_api=external_api, - ) - ) - actual = ApiHubClient.external_api_path(project, location, external_api) - assert expected == actual - - -def test_parse_external_api_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "external_api": "mussel", - } - path = ApiHubClient.external_api_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_external_api_path(path) - assert expected == actual - - -def test_spec_path(): - project = "winkle" - location = "nautilus" - api = "scallop" - version = "abalone" - spec = "squid" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( - project=project, - location=location, - api=api, - version=version, - spec=spec, - ) - actual = ApiHubClient.spec_path(project, location, api, version, spec) - assert expected == actual - - -def test_parse_spec_path(): - expected = { - "project": "clam", - "location": "whelk", - "api": "octopus", - "version": "oyster", - "spec": "nudibranch", - } - path = ApiHubClient.spec_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_spec_path(path) - assert expected == actual - - -def test_version_path(): - project = "cuttlefish" - location = "mussel" - api = "winkle" - version = "nautilus" - expected = ( - "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( - project=project, - location=location, - api=api, - version=version, - ) - ) - actual = ApiHubClient.version_path(project, location, api, version) - assert expected == actual - - -def test_parse_version_path(): - expected = { - "project": "scallop", - "location": "abalone", - "api": "squid", - "version": "clam", - } - path = ApiHubClient.version_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_version_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = ApiHubClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = ApiHubClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = ApiHubClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = ApiHubClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = ApiHubClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) + client = ApiHubClient(transport=transport) + assert client.transport is transport @pytest.mark.parametrize( - "request_type", + "transport_class", [ - locations_pb2.GetLocationRequest, - dict, + transports.ApiHubRestTransport, ], ) -def test_get_location_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - locations_pb2.ListLocationsRequest, - dict, + "rest", ], ) -def test_list_locations_rest(request_type): - client = ApiHubClient( +def test_transport_kind(transport_name): + transport = ApiHubClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + assert transport.kind == transport_name - response = client.list_locations(request) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) +def test_api_hub_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_api_hub_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_api", + "get_api", + "list_apis", + "update_api", + "delete_api", + "create_version", + "get_version", + "list_versions", + "update_version", + "delete_version", + "create_spec", + "get_spec", + "get_spec_contents", + "list_specs", + "update_spec", + "delete_spec", + "get_api_operation", + "list_api_operations", + "get_definition", + "create_deployment", + "get_deployment", + "list_deployments", + "update_deployment", + "delete_deployment", + "create_attribute", + "get_attribute", + "update_attribute", + "delete_attribute", + "list_attributes", + "search_resources", + "create_external_api", + "get_external_api", + "update_external_api", + "delete_external_api", + "list_external_apis", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_api_hub_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - response = client.cancel_operation(request) - # Establish that the response is the type that we expect. - assert response is None +def test_api_hub_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport() + adc.assert_called_once() -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_api_hub_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) +def test_api_hub_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.DeleteOperationRequest, - dict, + "rest", ], ) -def test_delete_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect.
- assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): +def test_api_hub_host_no_port(transport_name): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com/" ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.GetOperationRequest, - dict, + "rest", ], ) -def test_get_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): +def test_api_hub_host_with_port(transport_name): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000/" ) - # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_api_hub_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubClient( + credentials=creds1, + transport=transport_name, ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + client2 = ApiHubClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api._session + session2 = client2.transport.create_api._session + assert session1 != session2 + session1 = client1.transport.get_api._session + session2 = client2.transport.get_api._session + assert session1 != session2 + session1 = client1.transport.list_apis._session + session2 = client2.transport.list_apis._session + assert session1 != session2 + session1 = client1.transport.update_api._session + session2 = client2.transport.update_api._session + assert session1 != session2 + session1 = client1.transport.delete_api._session + session2 = client2.transport.delete_api._session + assert session1 != session2 + session1 = client1.transport.create_version._session + session2 = client2.transport.create_version._session + assert session1 != session2 + session1 = client1.transport.get_version._session + session2 = client2.transport.get_version._session + assert session1 != session2 + session1 = client1.transport.list_versions._session + session2 = client2.transport.list_versions._session + assert session1 != session2 + session1 = client1.transport.update_version._session + session2 = client2.transport.update_version._session + assert session1 != session2 + session1 = client1.transport.delete_version._session + session2 = client2.transport.delete_version._session + assert session1 != session2 + session1 = client1.transport.create_spec._session + session2 = client2.transport.create_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec._session + session2 = client2.transport.get_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec_contents._session + session2 = client2.transport.get_spec_contents._session + assert session1 != session2 + session1 = client1.transport.list_specs._session + 
session2 = client2.transport.list_specs._session + assert session1 != session2 + session1 = client1.transport.update_spec._session + session2 = client2.transport.update_spec._session + assert session1 != session2 + session1 = client1.transport.delete_spec._session + session2 = client2.transport.delete_spec._session + assert session1 != session2 + session1 = client1.transport.get_api_operation._session + session2 = client2.transport.get_api_operation._session + assert session1 != session2 + session1 = client1.transport.list_api_operations._session + session2 = client2.transport.list_api_operations._session + assert session1 != session2 + session1 = client1.transport.get_definition._session + session2 = client2.transport.get_definition._session + assert session1 != session2 + session1 = client1.transport.create_deployment._session + session2 = client2.transport.create_deployment._session + assert session1 != session2 + session1 = client1.transport.get_deployment._session + session2 = client2.transport.get_deployment._session + assert session1 != session2 + session1 = client1.transport.list_deployments._session + session2 = client2.transport.list_deployments._session + assert session1 != session2 + session1 = client1.transport.update_deployment._session + session2 = client2.transport.update_deployment._session + assert session1 != session2 + session1 = client1.transport.delete_deployment._session + session2 = client2.transport.delete_deployment._session + assert session1 != session2 + session1 = client1.transport.create_attribute._session + session2 = client2.transport.create_attribute._session + assert session1 != session2 + session1 = client1.transport.get_attribute._session + session2 = client2.transport.get_attribute._session + assert session1 != session2 + session1 = client1.transport.update_attribute._session + session2 = client2.transport.update_attribute._session + assert session1 != session2 + session1 = client1.transport.delete_attribute._session + session2 = client2.transport.delete_attribute._session + assert session1 != session2 + session1 = client1.transport.list_attributes._session + session2 = client2.transport.list_attributes._session + assert session1 != session2 + session1 = client1.transport.search_resources._session + session2 = client2.transport.search_resources._session + assert session1 != session2 + session1 = client1.transport.create_external_api._session + session2 = client2.transport.create_external_api._session + assert session1 != session2 + session1 = client1.transport.get_external_api._session + session2 = client2.transport.get_external_api._session + assert session1 != session2 + session1 = client1.transport.update_external_api._session + session2 = client2.transport.update_external_api._session + assert session1 != session2 + session1 = client1.transport.delete_external_api._session + session2 = client2.transport.delete_external_api._session + assert session1 != session2 + session1 = client1.transport.list_external_apis._session + session2 = client2.transport.list_external_apis._session + assert session1 != session2 -def test_delete_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_api_path(): + project = "squid" + location = "clam" + api = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}".format( + project=project, + location=location, + api=api, ) + actual = ApiHubClient.api_path(project, location, api) + assert expected == actual - # 
Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + } + path = ApiHubClient.api_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_api_operation_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + operation = "scallop" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( + project=project, + location=location, + api=api, + version=version, + operation=operation, ) + actual = ApiHubClient.api_operation_path(project, location, api, version, operation) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_operation_path(): + expected = { + "project": "abalone", + "location": "squid", + "api": "clam", + "version": "whelk", + "operation": "octopus", + } + path = ApiHubClient.api_operation_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_operation_path(path) + assert expected == actual -def test_delete_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "oyster" + location = "nudibranch" + attribute = "cuttlefish" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "mussel", + "location": "winkle", + "attribute": "nautilus", + } + path = ApiHubClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_definition_path(): + project = "scallop" + location = "abalone" + api = "squid" + version = "clam" + definition = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( + project=project, + location=location, + api=api, + version=version, + definition=definition, ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + actual = ApiHubClient.definition_path(project, location, api, version, definition) + assert expected == actual -def test_delete_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None +def test_parse_definition_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + "version": "cuttlefish", + "definition": "mussel", + } + path = ApiHubClient.definition_path(**expected) - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + # Check that the path construction is reversible. + actual = ApiHubClient.parse_definition_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_deployment_path(): + project = "winkle" + location = "nautilus" + deployment = "scallop" + expected = ( + "projects/{project}/locations/{location}/deployments/{deployment}".format( + project=project, + location=location, + deployment=deployment, ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + actual = ApiHubClient.deployment_path(project, location, deployment) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_deployment_path(): + expected = { + "project": "abalone", + "location": "squid", + "deployment": "clam", + } + path = ApiHubClient.deployment_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_deployment_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_external_api_path(): + project = "whelk" + location = "octopus" + external_api = "oyster" + expected = ( + "projects/{project}/locations/{location}/externalApis/{external_api}".format( + project=project, + location=location, + external_api=external_api, + ) ) + actual = ApiHubClient.external_api_path(project, location, external_api) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_external_api_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "external_api": "mussel", + } + path = ApiHubClient.external_api_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_external_api_path(path) + assert expected == actual -def test_cancel_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_spec_path(): + project = "winkle" + location = "nautilus" + api = "scallop" + version = "abalone" + spec = "squid" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, ) + actual = ApiHubClient.spec_path(project, location, api, version, spec) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_spec_path(): + expected = { + "project": "clam", + "location": "whelk", + "api": "octopus", + "version": "oyster", + "spec": "nudibranch", + } + path = ApiHubClient.spec_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_spec_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_version_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + expected = ( + "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( + project=project, + location=location, + api=api, + version=version, + ) ) + actual = ApiHubClient.version_path(project, location, api, version) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_version_path(): + expected = { + "project": "scallop", + "location": "abalone", + "api": "squid", + "version": "clam", + } + path = ApiHubClient.version_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_version_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + actual = ApiHubClient.common_billing_account_path(billing_account) + assert expected == actual - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ApiHubClient.common_billing_account_path(**expected) -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ApiHubClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ApiHubClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ApiHubClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.GetOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ApiHubClient.common_location_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() - + prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubAsyncClient( +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_list_locations_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.delete_operation(request) + # Establish that the response is the type that we expect. + assert response is None -def test_get_location(transport: str = "grpc"): + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_get_location_field_headers(): - client = ApiHubClient(credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -31071,7 +14654,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubClient( @@ -31088,8 +14670,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubClient, transports.ApiHubGrpcTransport), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport), + (ApiHubClient, transports.ApiHubRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py index bf0a3da3c9e3..525149783ded 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py @@ -48,7 +48,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.api_hub_dependencies import ( - ApiHubDependenciesAsyncClient, ApiHubDependenciesClient, pagers, transports, @@ -222,11 +221,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -315,7 +309,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -395,8 +388,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubDependenciesClient, "grpc"), - (ApiHubDependenciesAsyncClient, "grpc_asyncio"), (ApiHubDependenciesClient, "rest"), ], ) @@ -423,8 +414,6 @@ def test_api_hub_dependencies_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubDependenciesGrpcTransport, "grpc"), - (transports.ApiHubDependenciesGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -449,8 +438,6 @@ def test_api_hub_dependencies_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubDependenciesClient, "grpc"), - (ApiHubDependenciesAsyncClient, "grpc_asyncio"), (ApiHubDependenciesClient, "rest"), ], ) @@ -484,24 +471,17 @@ def test_api_hub_dependencies_client_from_service_account_file( def test_api_hub_dependencies_client_get_transport_class(): transport = ApiHubDependenciesClient.get_transport_class() available_transports = [ - transports.ApiHubDependenciesGrpcTransport, transports.ApiHubDependenciesRestTransport, ] assert transport in available_transports - transport = ApiHubDependenciesClient.get_transport_class("grpc") - assert transport == transports.ApiHubDependenciesGrpcTransport + transport = ApiHubDependenciesClient.get_transport_class("rest") + assert transport == transports.ApiHubDependenciesRestTransport 
@pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -510,11 +490,6 @@ def test_api_hub_dependencies_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_client_options( client_class, transport_class, transport_name ): @@ -648,30 +623,6 @@ def test_api_hub_dependencies_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - "true", - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - "false", - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, @@ -691,11 +642,6 @@ def test_api_hub_dependencies_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_dependencies_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -799,19 +745,12 @@ def test_api_hub_dependencies_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", [ApiHubDependenciesClient, ApiHubDependenciesAsyncClient] -) +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) @mock.patch.object( ApiHubDependenciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -903,19 +842,12 @@ def test_api_hub_dependencies_client_get_mtls_endpoint_and_cert_source(client_cl ) -@pytest.mark.parametrize( - "client_class", [ApiHubDependenciesClient, ApiHubDependenciesAsyncClient] -) +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) @mock.patch.object( ApiHubDependenciesClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -992,12 +924,6 @@ def test_api_hub_dependencies_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, 
transports.ApiHubDependenciesGrpcTransport, "grpc"), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -1029,18 +955,6 @@ def test_api_hub_dependencies_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, @@ -1073,96 +987,6 @@ def test_api_hub_dependencies_client_client_options_credentials_file( ) -def test_api_hub_dependencies_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubDependenciesClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_dependencies_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1170,34 +994,119 @@ def test_api_hub_dependencies_client_create_channel_credentials_file( dict, ], ) -def test_create_dependency(request_type, transport: str = "grpc"): +def test_create_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["dependency"] = { + "name": "name_value", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield] + else: + del request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.create_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dependency(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Dependency) @@ -1207,66 +1116,13 @@ def test_create_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_create_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest() - - -def test_create_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateDependencyRequest( - parent="parent_value", - dependency_id="dependency_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest( - parent="parent_value", - dependency_id="dependency_id_value", - ) - - -def test_create_dependency_use_cached_wrapped_rpc(): +def test_create_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1284,6 +1140,7 @@ def test_create_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_dependency ] = mock_rpc + request = {} client.create_dependency(request) @@ -1297,287 +1154,235 @@ def test_create_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_dependency_rest_required_fields( + request_type=apihub_service.CreateDependencyRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.create_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_dependency - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_dependency - ] = mock_rpc - - request = {} - await client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_dependency(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("dependency_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.create_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dependency(request) -@pytest.mark.asyncio -async def test_create_dependency_async_from_dict(): - await test_create_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDependencyRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value = common_fields.Dependency() - client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dependencyId",)) + & set( + ( + "parent", + "dependency", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDependencyRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDependencyRequest.pb( + apihub_service.CreateDependencyRequest() ) - await client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) -def test_create_dependency_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_dependency( - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].dependency_id - mock_val = "dependency_id_value" - assert arg == mock_val - - -def test_create_dependency_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dependency( - apihub_service.CreateDependencyRequest(), - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dependency(request) -@pytest.mark.asyncio -async def test_create_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_create_dependency_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dependency( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", dependency=common_fields.Dependency(name="name_value"), dependency_id="dependency_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].dependency_id - mock_val = "dependency_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) + + +def test_create_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_dependency( + client.create_dependency( apihub_service.CreateDependencyRequest(), parent="parent_value", dependency=common_fields.Dependency(name="name_value"), @@ -1585,6 +1390,12 @@ async def test_create_dependency_flattened_error_async(): ) +def test_create_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1592,34 +1403,38 @@ async def test_create_dependency_flattened_error_async(): dict, ], ) -def test_get_dependency(request_type, transport: str = "grpc"): +def test_get_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.get_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDependencyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
+ # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dependency(request) + + # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Dependency) assert response.name == "name_value" assert response.state == common_fields.Dependency.State.PROPOSED @@ -1627,60 +1442,13 @@ def test_get_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_get_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest() - - -def test_get_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetDependencyRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest( - name="name_value", - ) - - -def test_get_dependency_use_cached_wrapped_rpc(): +def test_get_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1696,6 +1464,7 @@ def test_get_dependency_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.get_dependency] = mock_rpc + request = {} client.get_dependency(request) @@ -1709,207 +1478,218 @@ def test_get_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.get_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest() - - -@pytest.mark.asyncio -async def test_get_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_dependency_rest_required_fields( + request_type=apihub_service.GetDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubDependenciesRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_dependency - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_dependency - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_dependency(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_dependency(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.get_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dependency(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_get_dependency_async_from_dict(): - await test_get_dependency_async(request_type=dict) +def test_get_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -def test_get_dependency_field_headers(): - client = ApiHubDependenciesClient( + unset_fields = transport.get_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDependencyRequest.pb( + apihub_service.GetDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDependencyRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value = common_fields.Dependency() - client.get_dependency(request) + request = apihub_service.GetDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_get_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +def test_get_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDependencyRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDependencyRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - await client.get_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dependency(request) -def test_get_dependency_flattened(): +def test_get_dependency_rest_flattened(): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dependency( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) -def test_get_dependency_flattened_error(): +def test_get_dependency_rest_flattened_error(transport: str = "rest"): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1921,49 +1701,11 @@ def test_get_dependency_flattened_error(): ) -@pytest.mark.asyncio -async def test_get_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_dependency( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_dependency( - apihub_service.GetDependencyRequest(), - name="name_value", - ) - @pytest.mark.parametrize( "request_type", @@ -1972,34 +1714,123 @@ async def test_get_dependency_flattened_error_async(): dict, ], ) -def test_update_dependency(request_type, transport: str = "grpc"): +def test_update_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request_init["dependency"] = { + "name": "projects/sample1/locations/sample2/dependencies/sample3", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield]
+ else: + del request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.update_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dependency(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Dependency) @@ -2009,60 +1840,13 @@ def test_update_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_update_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -def test_update_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateDependencyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string.
- ) - client.update_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -def test_update_dependency_use_cached_wrapped_rpc(): +def test_update_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2080,6 +1864,7 @@ def test_update_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_dependency ] = mock_rpc + request = {} client.update_dependency(request) @@ -2093,284 +1878,249 @@ def test_update_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.update_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -@pytest.mark.asyncio -async def test_update_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_dependency_rest_required_fields( + request_type=apihub_service.UpdateDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_dependency - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubDependenciesRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_dependency - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_dependency(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_dependency(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.update_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dependency(request) -@pytest.mark.asyncio -async def test_update_dependency_async_from_dict(): - await test_update_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDependencyRequest() - - request.dependency.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value = common_fields.Dependency() - client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_update_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dependency.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "dependency", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDependencyRequest() - - request.dependency.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDependencyRequest.pb( + apihub_service.UpdateDependencyRequest() ) - await client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dependency.name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) -def test_update_dependency_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_dependency( - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_dependency_flattened_error(): +def test_update_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_dependency( - apihub_service.UpdateDependencyRequest(), - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_dependency(request) -@pytest.mark.asyncio -async def test_update_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_update_dependency_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_dependency( + # get arguments that satisfy an http rule for this method + sample_request = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( dependency=common_fields.Dependency(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{dependency.name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_dependency( + client.update_dependency( apihub_service.UpdateDependencyRequest(), dependency=common_fields.Dependency(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2378,92 +2128,41 @@ async def test_update_dependency_flattened_error_async(): dict, ], ) -def test_delete_dependency(request_type, transport: str = "grpc"): +def test_delete_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dependency(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dependency(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest() - - -def test_delete_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteDependencyRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest( - name="name_value", - ) - - -def test_delete_dependency_use_cached_wrapped_rpc(): +def test_delete_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2481,6 +2180,7 @@ def test_delete_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_dependency ] = mock_rpc + request = {} client.delete_dependency(request) @@ -2494,252 +2194,222 @@ def test_delete_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest() - - -@pytest.mark.asyncio -async def test_delete_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_dependency_rest_required_fields( + request_type=apihub_service.DeleteDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubDependenciesRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_dependency - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_dependency - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_dependency(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_dependency(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dependency(request) + # Designate an appropriate value for the returned response. 
+ return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDependencyRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the response is the type that we expect. - assert response is None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dependency(request) -@pytest.mark.asyncio -async def test_delete_dependency_async_from_dict(): - await test_delete_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_delete_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDependencyRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value = None - client.delete_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_delete_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_delete_dependency" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDependencyRequest.pb( + apihub_service.DeleteDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDependencyRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dependency(request) + request = apihub_service.DeleteDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -def test_delete_dependency_flattened(): +def test_delete_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_dependency( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dependency(request) -def test_delete_dependency_flattened_error(): +def test_delete_dependency_rest_flattened(): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dependency( - apihub_service.DeleteDependencyRequest(), - name="name_value", - ) - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None -@pytest.mark.asyncio -async def test_delete_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_dependency( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( +def test_delete_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_dependency( + client.delete_dependency( apihub_service.DeleteDependencyRequest(), name="name_value", ) +def test_delete_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2747,99 +2417,46 @@ async def test_delete_dependency_flattened_error_async(): dict, ], ) -def test_list_dependencies(request_type, transport: str = "grpc"): +def test_list_dependencies_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse( next_page_token="next_page_token_value", ) - response = client.list_dependencies(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDependenciesRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dependencies(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDependenciesPager) assert response.next_page_token == "next_page_token_value" -def test_list_dependencies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_dependencies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest() - - -def test_list_dependencies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListDependenciesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_dependencies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_dependencies_use_cached_wrapped_rpc(): +def test_list_dependencies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2857,6 +2474,7 @@ def test_list_dependencies_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_dependencies ] = mock_rpc + request = {} client.list_dependencies(request) @@ -2870,277 +2488,256 @@ def test_list_dependencies_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_dependencies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_dependencies_rest_required_fields( + request_type=apihub_service.ListDependenciesRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_dependencies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_dependencies_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_dependencies - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_dependencies - ] = mock_rpc - - request = {} - await client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_dependencies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_dependencies_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListDependenciesRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_dependencies(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDependenciesRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDependenciesAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dependencies(request) -@pytest.mark.asyncio -async def test_list_dependencies_async_from_dict(): - await test_list_dependencies_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_dependencies_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_dependencies_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDependenciesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value = apihub_service.ListDependenciesResponse() - client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_dependencies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_dependencies_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dependencies_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDependenciesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDependenciesRequest.pb( + apihub_service.ListDependenciesRequest() ) - await client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDependenciesResponse.to_json( + apihub_service.ListDependenciesResponse() + ) -def test_list_dependencies_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListDependenciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDependenciesResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_dependencies( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_dependencies_flattened_error(): +def test_list_dependencies_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDependenciesRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dependencies( - apihub_service.ListDependenciesRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dependencies(request) -@pytest.mark.asyncio -async def test_list_dependencies_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_list_dependencies_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_dependencies( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_dependencies(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_dependencies_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( +def test_list_dependencies_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_dependencies( + client.list_dependencies( apihub_service.ListDependenciesRequest(), parent="parent_value", ) -def test_list_dependencies_pager(transport_name: str = "grpc"): +def test_list_dependencies_rest_pager(transport: str = "rest"): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListDependenciesResponse( dependencies=[ common_fields.Dependency(), @@ -3165,3927 +2762,831 @@ def test_list_dependencies_pager(transport_name: str = "grpc"): common_fields.Dependency(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDependenciesResponse.to_json(x) for x in response ) - pager = client.list_dependencies(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_dependencies(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Dependency) for i in results) + pages = list(client.list_dependencies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + -def test_list_dependencies_pages(transport_name: str = "grpc"): - client = ApiHubDependenciesClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dependencies(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_dependencies_async_pager(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dependencies( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Dependency) for i in responses) - - -@pytest.mark.asyncio -async def test_list_dependencies_async_pages(): - client = ApiHubDependenciesAsyncClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_dependencies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateDependencyRequest, - dict, - ], -) -def test_create_dependency_rest(request_type): - client = ApiHubDependenciesClient( + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["dependency"] = { - "name": "name_value", - "consumer": { - "operation_resource_name": "operation_resource_name_value", - "external_api_resource_name": "external_api_resource_name_value", - "display_name": "display_name_value", - }, - "supplier": {}, - "state": 1, - "description": "description_value", - "discovery_mode": 1, - "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, - "create_time": {}, - "update_time": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateDependencyRequest.meta.fields["dependency"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dependency"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dependency"][field])): - del request_init["dependency"][field][i][subfield] - else: - del request_init["dependency"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_dependency(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_create_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_dependency - ] = mock_rpc - - request = {} - client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_dependency_rest_required_fields( - request_type=apihub_service.CreateDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dependency._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("dependency_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_dependency._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("dependencyId",)) - & set( - ( - "parent", - "dependency", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateDependencyRequest.pb( - apihub_service.CreateDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.CreateDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.create_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_dependency(request) - - -def test_create_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/dependencies" - % client.transport._host, - args[1], - ) - - -def test_create_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_dependency( - apihub_service.CreateDependencyRequest(), - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) - - -def test_create_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDependencyRequest, - dict, - ], -) -def test_get_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_dependency(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_get_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_dependency] = mock_rpc - - request = {} - client.get_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_dependency_rest_required_fields( - request_type=apihub_service.GetDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_dependency._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetDependencyRequest.pb( - apihub_service.GetDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.GetDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.get_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = 
request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_dependency(request) - - -def test_get_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_get_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dependency( - apihub_service.GetDependencyRequest(), - name="name_value", - ) - - -def test_get_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateDependencyRequest, - dict, - ], -) -def test_update_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - request_init["dependency"] = { - "name": "projects/sample1/locations/sample2/dependencies/sample3", - "consumer": { - "operation_resource_name": "operation_resource_name_value", - "external_api_resource_name": "external_api_resource_name_value", - "display_name": "display_name_value", - }, - "supplier": {}, - "state": 1, - "description": "description_value", - "discovery_mode": 1, - "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, - "create_time": {}, - "update_time": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateDependencyRequest.meta.fields["dependency"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dependency"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dependency"][field])): - del request_init["dependency"][field][i][subfield] - else: - del request_init["dependency"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_dependency(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_update_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_dependency - ] = mock_rpc - - request = {} - client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_dependency_rest_required_fields( - request_type=apihub_service.UpdateDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_dependency._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_dependency._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "dependency", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateDependencyRequest.pb( - apihub_service.UpdateDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.UpdateDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.update_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_dependency(request) - - -def test_update_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{dependency.name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_update_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_dependency( - apihub_service.UpdateDependencyRequest(), - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteDependencyRequest, - dict, - ], -) -def test_delete_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_dependency(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_dependency - ] = mock_rpc - - request = {} - client.delete_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_dependency_rest_required_fields( - request_type=apihub_service.DeleteDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_dependency._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_delete_dependency" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteDependencyRequest.pb( - apihub_service.DeleteDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_dependency(request) - - -def test_delete_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dependency( - apihub_service.DeleteDependencyRequest(), - name="name_value", - ) - - -def test_delete_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDependenciesRequest, - dict, - ], -) -def test_list_dependencies_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_dependencies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDependenciesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_dependencies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_dependencies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_dependencies - ] = mock_rpc - - request = {} - client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_dependencies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_dependencies_rest_required_fields( - request_type=apihub_service.ListDependenciesRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_dependencies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_dependencies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_dependencies(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_dependencies_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_dependencies._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dependencies_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListDependenciesRequest.pb( - apihub_service.ListDependenciesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListDependenciesResponse.to_json( - apihub_service.ListDependenciesResponse() - ) - - request = apihub_service.ListDependenciesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListDependenciesResponse() - - client.list_dependencies( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_dependencies_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListDependenciesRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_dependencies(request) - - -def test_list_dependencies_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_dependencies(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/dependencies" - % client.transport._host, - args[1], - ) - - -def test_list_dependencies_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dependencies( - apihub_service.ListDependenciesRequest(), - parent="parent_value", - ) - - -def test_list_dependencies_rest_pager(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListDependenciesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_dependencies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Dependency) for i in results) - - pages = list(client.list_dependencies(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubDependenciesClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubDependenciesGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - transports.ApiHubDependenciesRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubDependenciesClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubDependenciesGrpcTransport, - ) - - -def test_api_hub_dependencies_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubDependenciesTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_dependencies_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubDependenciesTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_dependency", - "get_dependency", - "update_dependency", - "delete_dependency", - "list_dependencies", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_dependencies_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubDependenciesTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_dependencies_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubDependenciesTransport() - adc.assert_called_once() - - -def test_api_hub_dependencies_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubDependenciesClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - transports.ApiHubDependenciesRestTransport, - ], -) -def test_api_hub_dependencies_transport_auth_gdch_credentials(transport_class): - host = "https://language.com/" - api_audience_tests = [None, "https://language2.com/"] - api_audience_expect = [host, "https://language2.com/"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubDependenciesGrpcTransport, grpc_helpers), - (transports.ApiHubDependenciesGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_dependencies_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback,
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert, private_key=expected_key
- )
-
-
-def test_api_hub_dependencies_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch(
- "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
- ) as mock_configure_mtls_channel:
- transports.ApiHubDependenciesRestTransport(
- credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- "grpc_asyncio",
- "rest",
- ],
-)
-def test_api_hub_dependencies_host_no_port(transport_name):
- client = ApiHubDependenciesClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="apihub.googleapis.com"
- ),
- transport=transport_name,
- )
- assert client.transport._host == (
- "apihub.googleapis.com:443"
- if transport_name in ["grpc", "grpc_asyncio"]
- else "https://apihub.googleapis.com/"
- )
-
-
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- "grpc_asyncio",
- "rest",
- ],
-)
-def test_api_hub_dependencies_host_with_port(transport_name):
- client = ApiHubDependenciesClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="apihub.googleapis.com:8000"
- ),
- transport=transport_name,
- )
- assert client.transport._host == (
- "apihub.googleapis.com:8000"
- if transport_name in ["grpc", "grpc_asyncio"]
- else "https://apihub.googleapis.com:8000/"
- )
-
-
-@pytest.mark.parametrize(
- "transport_name",
- [
- "rest",
- ],
-)
-def test_api_hub_dependencies_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = ApiHubDependenciesClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = ApiHubDependenciesClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.create_dependency._session
- session2 = client2.transport.create_dependency._session
- assert session1 != session2
- session1 = client1.transport.get_dependency._session
- session2 = client2.transport.get_dependency._session
- assert session1 != session2
- session1 = client1.transport.update_dependency._session
- session2 = client2.transport.update_dependency._session
- assert session1 != session2
- session1 = client1.transport.delete_dependency._session
- session2 = client2.transport.delete_dependency._session
- assert session1 != session2
- session1 = client1.transport.list_dependencies._session
- session2 = client2.transport.list_dependencies._session
- assert session1 != session2
-
-
-def test_api_hub_dependencies_grpc_transport_channel():
- channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ApiHubDependenciesGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials == None
-
-
-def test_api_hub_dependencies_grpc_asyncio_transport_channel():
- channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ApiHubDependenciesGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials == None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.ApiHubDependenciesGrpcTransport,
- transports.ApiHubDependenciesGrpcAsyncIOTransport,
- ],
-)
-def test_api_hub_dependencies_transport_channel_mtls_with_client_cert_source(
- transport_class,
-):
- with mock.patch(
- "grpc.ssl_channel_credentials", autospec=True
- ) as grpc_ssl_channel_cred:
- with mock.patch.object(
- transport_class, "create_channel"
- ) as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_attribute_path(): - project = "squid" - location = "clam" - attribute = "whelk" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubDependenciesClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "octopus", - "location": "oyster", - "attribute": "nudibranch", - } - path = ApiHubDependenciesClient.attribute_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_attribute_path(path) - assert expected == actual - - -def test_dependency_path(): - project = "cuttlefish" - location = "mussel" - dependency = "winkle" - expected = ( - "projects/{project}/locations/{location}/dependencies/{dependency}".format( - project=project, - location=location, - dependency=dependency, - ) - ) - actual = ApiHubDependenciesClient.dependency_path(project, location, dependency) - assert expected == actual - - -def test_parse_dependency_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "dependency": "abalone", - } - path = ApiHubDependenciesClient.dependency_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_dependency_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubDependenciesClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ApiHubDependenciesClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubDependenciesClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ApiHubDependenciesClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubDependenciesClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ApiHubDependenciesClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubDependenciesClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ApiHubDependenciesClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubDependenciesClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ApiHubDependenciesClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubDependenciesClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = ApiHubDependenciesClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = ApiHubDependenciesClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubDependenciesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubDependenciesClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_api_hub_dependencies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_api_hub_dependencies_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = (
+ "create_dependency",
+ "get_dependency",
+ "update_dependency",
+ "delete_dependency",
+ "list_dependencies",
+ "get_location",
+ "list_locations",
+ "get_operation",
+ "cancel_operation",
+ "delete_operation",
+ "list_operations",
)
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = operations_pb2.DeleteOperationRequest()
- request.name = "locations"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
- call.return_value = None
-
- client.delete_operation(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- "x-goog-request-params",
- "name=locations",
- ) in kw["metadata"]
+ with pytest.raises(NotImplementedError):
+ transport.close()
+ # Catch all for all remaining methods and properties
+ remainder = [
+ "kind",
+ ]
+ for r in remainder:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, r)()
-@pytest.mark.asyncio
-async def test_delete_operation_field_headers_async():
- client = ApiHubDependenciesAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = operations_pb2.DeleteOperationRequest()
- request.name = "locations"
+def test_api_hub_dependencies_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch(
+ "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport = transports.ApiHubDependenciesTransport(
+ credentials_file="credentials.json",
+ quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=None,
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_operation(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- "x-goog-request-params",
- "name=locations",
- ) in kw["metadata"]
+def test_api_hub_dependencies_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+ "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport = transports.ApiHubDependenciesTransport()
+ adc.assert_called_once()
-def test_delete_operation_from_dict():
- client = ApiHubDependenciesClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
-
- response = client.delete_operation(
- request={
- "name": "locations",
- }
+def test_api_hub_dependencies_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ ApiHubDependenciesClient()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id=None,
)
- call.assert_called()
-@pytest.mark.asyncio
-async def test_delete_operation_from_dict_async():
- client = ApiHubDependenciesAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- response = await client.delete_operation(
- request={
- "name": "locations",
- }
+def test_api_hub_dependencies_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transports.ApiHubDependenciesRestTransport(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
- call.assert_called()
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-def test_cancel_operation(transport: str = "grpc"):
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "rest",
+ ],
+)
+def test_api_hub_dependencies_host_no_port(transport_name):
client = ApiHubDependenciesClient(
credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ client_options=client_options.ClientOptions(
+ api_endpoint="apihub.googleapis.com"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "apihub.googleapis.com:443"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://apihub.googleapis.com/"
)
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = operations_pb2.CancelOperationRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- response = client.cancel_operation(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert response is None
-@pytest.mark.asyncio
-async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
- client = ApiHubDependenciesAsyncClient(
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "rest",
+ ],
+)
+def test_api_hub_dependencies_host_with_port(transport_name):
+ client = ApiHubDependenciesClient(
credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ client_options=client_options.ClientOptions(
+ api_endpoint="apihub.googleapis.com:8000"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "apihub.googleapis.com:8000"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://apihub.googleapis.com:8000/"
)
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = operations_pb2.CancelOperationRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- response = await client.cancel_operation(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
- # Establish that the response is the type that we expect.
- assert response is None
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "rest",
+ ],
+)
+def test_api_hub_dependencies_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = ApiHubDependenciesClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = ApiHubDependenciesClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.create_dependency._session
+ session2 = client2.transport.create_dependency._session
+ assert session1 != session2
+ session1 = client1.transport.get_dependency._session
+ session2 = client2.transport.get_dependency._session
+ assert session1 != session2
+ session1 = client1.transport.update_dependency._session
+ session2 = client2.transport.update_dependency._session
+ assert session1 != session2
+ session1 = client1.transport.delete_dependency._session
+ session2 = client2.transport.delete_dependency._session
+ assert session1 != session2
+ session1 = client1.transport.list_dependencies._session
+ session2 = client2.transport.list_dependencies._session
+ assert session1 != session2
-def test_cancel_operation_field_headers():
- client = ApiHubDependenciesClient(
- credentials=ga_credentials.AnonymousCredentials(),
+def test_attribute_path():
+ project = "squid"
+ location = "clam"
+ attribute = "whelk"
+ expected = "projects/{project}/locations/{location}/attributes/{attribute}".format(
+ project=project,
+ location=location,
+ attribute=attribute,
)
+ actual = ApiHubDependenciesClient.attribute_path(project, location, attribute)
+ assert expected == actual
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = operations_pb2.CancelOperationRequest()
- request.name = "locations"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", + } + path = ApiHubDependenciesClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_dependency_path(): + project = "cuttlefish" + location = "mussel" + dependency = "winkle" + expected = ( + "projects/{project}/locations/{location}/dependencies/{dependency}".format( + project=project, + location=location, + dependency=dependency, + ) ) + actual = ApiHubDependenciesClient.dependency_path(project, location, dependency) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_dependency_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "dependency": "abalone", + } + path = ApiHubDependenciesClient.dependency_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_dependency_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ApiHubDependenciesClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ApiHubDependenciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubDependenciesClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ApiHubDependenciesClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubDependenciesClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ApiHubDependenciesClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubDependenciesClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ApiHubDependenciesClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubDependenciesClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ApiHubDependenciesClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubDependenciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ApiHubDependenciesClient(credentials=ga_credentials.AnonymousCredentials()) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -7103,7 +3604,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubDependenciesClient( @@ -7120,11 +3620,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ), + (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py index 97adab5ecf39..dc92c3df1475 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py @@ -46,7 +46,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.api_hub_plugin import ( - ApiHubPluginAsyncClient, ApiHubPluginClient, transports, ) @@ -195,11 +194,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -284,7 +278,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -364,8 +357,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubPluginClient, "grpc"), - (ApiHubPluginAsyncClient, "grpc_asyncio"), (ApiHubPluginClient, "rest"), ], ) @@ -390,8 +381,6 @@ def test_api_hub_plugin_client_from_service_account_info(client_class, transport @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubPluginGrpcTransport, "grpc"), - (transports.ApiHubPluginGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -416,8 +405,6 @@ def test_api_hub_plugin_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubPluginClient, "grpc"), - (ApiHubPluginAsyncClient, "grpc_asyncio"), (ApiHubPluginClient, "rest"), ], ) @@ -449,24 +436,17 @@ def test_api_hub_plugin_client_from_service_account_file(client_class, transport def test_api_hub_plugin_client_get_transport_class(): transport = ApiHubPluginClient.get_transport_class() available_transports = [ - transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginRestTransport, ] assert transport in available_transports - transport = ApiHubPluginClient.get_transport_class("grpc") - assert transport == transports.ApiHubPluginGrpcTransport + transport = ApiHubPluginClient.get_transport_class("rest") + assert transport == transports.ApiHubPluginRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - 
(ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -475,11 +455,6 @@ def test_api_hub_plugin_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_client_options( client_class, transport_class, transport_name ): @@ -613,20 +588,6 @@ def test_api_hub_plugin_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc", "true"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc", "false"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "true"), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "false"), ], @@ -636,11 +597,6 @@ def test_api_hub_plugin_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_plugin_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -744,15 +700,10 @@ def test_api_hub_plugin_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ApiHubPluginClient, ApiHubPluginAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( ApiHubPluginClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubPluginClient) ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -844,17 +795,12 @@ def test_api_hub_plugin_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ApiHubPluginClient, ApiHubPluginAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( ApiHubPluginClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -931,12 +877,6 @@ def test_api_hub_plugin_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -968,18 +908,6 @@ def test_api_hub_plugin_client_client_options_scopes( 
@pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ApiHubPluginClient, - transports.ApiHubPluginGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", None), ], ) @@ -1007,94 +935,6 @@ def test_api_hub_plugin_client_client_options_credentials_file( ) -def test_api_hub_plugin_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubPluginClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ApiHubPluginClient, - transports.ApiHubPluginGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_plugin_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1102,32 +942,36 @@ def test_api_hub_plugin_client_create_channel_credentials_file( dict, ], ) -def test_get_plugin(request_type, transport: str = "grpc"): +def test_get_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.get_plugin(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.GetPluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1137,60 +981,13 @@ def test_get_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_get_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest() - - -def test_get_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.GetPluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest( - name="name_value", - ) - - -def test_get_plugin_use_cached_wrapped_rpc(): +def test_get_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1206,6 +1003,7 @@ def test_get_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_plugin] = mock_rpc + request = {} client.get_plugin(request) @@ -1219,260 +1017,230 @@ def test_get_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.get_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest() - - -@pytest.mark.asyncio -async def test_get_plugin_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_plugin - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_plugin - ] = mock_rpc - - request = {} - await client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - +def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRequest): + transport_class = transports.ApiHubPluginRestTransport -@pytest.mark.asyncio -async def test_get_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.GetPluginRequest -): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.get_plugin(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.GetPluginRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_get_plugin_async_from_dict(): - await test_get_plugin_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_get_plugin_field_headers(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.GetPluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.GetPluginRequest() + response_value = Response() + response_value.status_code = 200 - request.name = "name_value" + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - await client.get_plugin(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_plugin(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + unset_fields = transport.get_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_plugin_flattened_error(): - client = ApiHubPluginClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubPluginRestInterceptor(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): + client = ApiHubPluginClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_get_plugin" + ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "pre_get_plugin" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = plugin_service.GetPluginRequest.pb( + plugin_service.GetPluginRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = plugin_service.Plugin.to_json( + plugin_service.Plugin() + ) + + request = plugin_service.GetPluginRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = plugin_service.Plugin() + client.get_plugin( - plugin_service.GetPluginRequest(), - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() + -@pytest.mark.asyncio -async def test_get_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( +def test_get_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.GetPluginRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_plugin( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_plugin(request) + + +def test_get_plugin_rest_flattened(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = plugin_service.Plugin() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( +def test_get_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_plugin( + client.get_plugin( plugin_service.GetPluginRequest(), name="name_value", ) +def test_get_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1480,32 +1248,36 @@ async def test_get_plugin_flattened_error_async(): dict, ], ) -def test_enable_plugin(request_type, transport: str = "grpc"): +def test_enable_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.enable_plugin(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.EnablePluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1515,60 +1287,13 @@ def test_enable_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_enable_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.enable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest() - - -def test_enable_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.EnablePluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.enable_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest( - name="name_value", - ) - - -def test_enable_plugin_use_cached_wrapped_rpc(): +def test_enable_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1584,6 +1309,7 @@ def test_enable_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.enable_plugin] = mock_rpc + request = {} client.enable_plugin(request) @@ -1597,262 +1323,234 @@ def test_enable_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_enable_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.enable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest() - - -@pytest.mark.asyncio -async def test_enable_plugin_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_enable_plugin_rest_required_fields( + request_type=plugin_service.EnablePluginRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubPluginRestTransport - # Ensure method has been cached - assert ( - client._client._transport.enable_plugin - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.enable_plugin - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.enable_plugin(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.enable_plugin(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_enable_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.EnablePluginRequest -): - client = ApiHubPluginAsyncClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.enable_plugin(request) + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.EnablePluginRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_enable_plugin_async_from_dict(): - await test_enable_plugin_async(request_type=dict) + response = client.enable_plugin(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_enable_plugin_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.EnablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.enable_plugin(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_enable_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.enable_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_enable_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubPluginRestInterceptor(), ) + client = ApiHubPluginClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_enable_plugin" + ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = plugin_service.EnablePluginRequest.pb( + plugin_service.EnablePluginRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.EnablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = plugin_service.Plugin.to_json( plugin_service.Plugin() ) - await client.enable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_enable_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = plugin_service.EnablePluginRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = plugin_service.Plugin() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.enable_plugin( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_enable_plugin_flattened_error(): +def test_enable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.EnablePluginRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.enable_plugin( - plugin_service.EnablePluginRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_plugin(request) -@pytest.mark.asyncio -async def test_enable_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( +def test_enable_plugin_rest_flattened(): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.enable_plugin( + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enable_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*}:enable" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_enable_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( +def test_enable_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.enable_plugin( + client.enable_plugin( plugin_service.EnablePluginRequest(), name="name_value", ) +def test_enable_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1860,32 +1558,36 @@ async def test_enable_plugin_flattened_error_async(): dict, ], ) -def test_disable_plugin(request_type, transport: str = "grpc"): +def test_disable_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.disable_plugin(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.DisablePluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1895,60 +1597,13 @@ def test_disable_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_disable_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.disable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest() - - -def test_disable_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.DisablePluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.disable_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest( - name="name_value", - ) - - -def test_disable_plugin_use_cached_wrapped_rpc(): +def test_disable_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1964,6 +1619,7 @@ def test_disable_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.disable_plugin] = mock_rpc + request = {} client.disable_plugin(request) @@ -1977,380 +1633,44 @@ def test_disable_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_disable_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.disable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest() - - -@pytest.mark.asyncio -async def test_disable_plugin_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_disable_plugin_rest_required_fields( + request_type=plugin_service.DisablePluginRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubPluginRestTransport - # Ensure method has been cached - assert ( - client._client._transport.disable_plugin - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.disable_plugin - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.disable_plugin(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.disable_plugin(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_disable_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.DisablePluginRequest -): - client = ApiHubPluginAsyncClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.DisablePluginRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -@pytest.mark.asyncio -async def test_disable_plugin_async_from_dict(): - await test_disable_plugin_async(request_type=dict) - - -def test_disable_plugin_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.DisablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_disable_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = plugin_service.DisablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - await client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_disable_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.disable_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_disable_plugin_flattened_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_disable_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.disable_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_disable_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.GetPluginRequest, - dict, - ], -) -def test_get_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_plugin(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_get_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_plugin] = mock_rpc - - request = {} - client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRequest): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = plugin_service.Plugin() @@ -2365,9 +1685,10 @@ def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2380,24 +1701,24 @@ def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRe response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_plugin(request) + response = client.disable_plugin(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_plugin_rest_unset_required_fields(): +def test_disable_plugin_rest_unset_required_fields(): transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_plugin._get_unset_required_fields({}) + unset_fields = transport.disable_plugin._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_plugin_rest_interceptors(null_interceptor): +def test_disable_plugin_rest_interceptors(null_interceptor): transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2410,14 +1731,14 @@ def test_get_plugin_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_get_plugin" + transports.ApiHubPluginRestInterceptor, "post_disable_plugin" ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_get_plugin" + transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" ) as pre: pre.assert_not_called() 
post.assert_not_called() - pb_message = plugin_service.GetPluginRequest.pb( - plugin_service.GetPluginRequest() + pb_message = plugin_service.DisablePluginRequest.pb( + plugin_service.DisablePluginRequest() ) transcode.return_value = { "method": "post", @@ -2433,7 +1754,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): plugin_service.Plugin() ) - request = plugin_service.GetPluginRequest() + request = plugin_service.DisablePluginRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2441,7 +1762,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = plugin_service.Plugin() - client.get_plugin( + client.disable_plugin( request, metadata=[ ("key", "val"), @@ -2453,8 +1774,8 @@ def test_get_plugin_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.GetPluginRequest +def test_disable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.DisablePluginRequest ): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2474,10 +1795,10 @@ def test_get_plugin_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_plugin(request) + client.disable_plugin(request) -def test_get_plugin_rest_flattened(): +def test_disable_plugin_rest_flattened(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2506,19 +1827,20 @@ def test_get_plugin_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_plugin(**mock_args) + client.disable_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/plugins/*}:disable" + % client.transport._host, args[1], ) -def test_get_plugin_rest_flattened_error(transport: str = "rest"): +def test_disable_plugin_rest_flattened_error(transport: str = "rest"): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2527,2566 +1849,806 @@ def test_get_plugin_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_plugin( - plugin_service.GetPluginRequest(), + client.disable_plugin( + plugin_service.DisablePluginRequest(), name="name_value", ) -def test_get_plugin_rest_error(): +def test_disable_plugin_rest_error(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.EnablePluginRequest, - dict, - ], -) -def test_enable_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.enable_plugin(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_enable_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.enable_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.enable_plugin] = mock_rpc - - request = {} - client.enable_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.enable_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_enable_plugin_rest_required_fields( - request_type=plugin_service.EnablePluginRequest, -): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.enable_plugin(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_enable_plugin_rest_unset_required_fields(): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.enable_plugin._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_enable_plugin_rest_interceptors(null_interceptor): +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubPluginRestInterceptor(), - ) - client = ApiHubPluginClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_enable_plugin" - ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = plugin_service.EnablePluginRequest.pb( - plugin_service.EnablePluginRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = plugin_service.Plugin.to_json( - plugin_service.Plugin() - ) - - request = plugin_service.EnablePluginRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = plugin_service.Plugin() - - client.enable_plugin( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_enable_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.EnablePluginRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.enable_plugin(request) - - -def test_enable_plugin_rest_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.enable_plugin(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}:enable" - % client.transport._host, - args[1], - ) - - -def test_enable_plugin_rest_flattened_error(transport: str = "rest"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - client.enable_plugin( - plugin_service.EnablePluginRequest(), - name="name_value", - ) - - -def test_enable_plugin_rest_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.DisablePluginRequest, - dict, - ], -) -def test_disable_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.disable_plugin(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_disable_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.disable_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.disable_plugin] = mock_rpc - - request = {} - client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.disable_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_disable_plugin_rest_required_fields( - request_type=plugin_service.DisablePluginRequest, -): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.disable_plugin(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_disable_plugin_rest_unset_required_fields(): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.disable_plugin._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_disable_plugin_rest_interceptors(null_interceptor): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubPluginRestInterceptor(), - ) - client = ApiHubPluginClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_disable_plugin" - ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = plugin_service.DisablePluginRequest.pb( - plugin_service.DisablePluginRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = plugin_service.Plugin.to_json( - plugin_service.Plugin() - ) - - request = plugin_service.DisablePluginRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = plugin_service.Plugin() - - client.disable_plugin( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_disable_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.DisablePluginRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.disable_plugin(request) - - -def test_disable_plugin_rest_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.disable_plugin(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}:disable" - % client.transport._host, - args[1], - ) - - -def test_disable_plugin_rest_flattened_error(transport: str = "rest"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -def test_disable_plugin_rest_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options={"scopes": ["1", "2"]}, transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubPluginClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubPluginGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - transports.ApiHubPluginRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubPluginClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubPluginGrpcTransport, - ) - - -def test_api_hub_plugin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubPluginTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_plugin_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubPluginTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "get_plugin", - "enable_plugin", - "disable_plugin", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_plugin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubPluginTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_plugin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubPluginTransport() - adc.assert_called_once() - - -def test_api_hub_plugin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubPluginClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - ], -) -def test_api_hub_plugin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - transports.ApiHubPluginRestTransport, - ], -) -def test_api_hub_plugin_transport_auth_gdch_credentials(transport_class): - host = "https://language.com/" - api_audience_tests = [None, "https://language2.com/"] - api_audience_expect = [host, "https://language2.com/"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubPluginGrpcTransport, grpc_helpers), - (transports.ApiHubPluginGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_plugin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_plugin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubPluginRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_plugin_host_no_port(transport_name): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_plugin_host_with_port(transport_name): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_api_hub_plugin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubPluginClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubPluginClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_plugin._session - session2 = client2.transport.get_plugin._session - assert session1 != session2 - session1 = client1.transport.enable_plugin._session - session2 = client2.transport.enable_plugin._session - assert session1 != session2 - session1 = client1.transport.disable_plugin._session - session2 = client2.transport.disable_plugin._session - assert session1 != session2 - - -def test_api_hub_plugin_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubPluginGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_plugin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ApiHubPluginGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_attribute_path(): - project = "squid" - location = "clam" - attribute = "whelk" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubPluginClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "octopus", - "location": "oyster", - "attribute": "nudibranch", - } - path = ApiHubPluginClient.attribute_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_attribute_path(path) - assert expected == actual - - -def test_plugin_path(): - project = "cuttlefish" - location = "mussel" - plugin = "winkle" - expected = "projects/{project}/locations/{location}/plugins/{plugin}".format( - project=project, - location=location, - plugin=plugin, - ) - actual = ApiHubPluginClient.plugin_path(project, location, plugin) - assert expected == actual - - -def test_parse_plugin_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "plugin": "abalone", - } - path = ApiHubPluginClient.plugin_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_plugin_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubPluginClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ApiHubPluginClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubPluginClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ApiHubPluginClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubPluginClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubPluginClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ApiHubPluginClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubPluginClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ApiHubPluginClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubPluginClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ApiHubPluginClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubPluginTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubPluginTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubPluginClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - + ) -def test_delete_operation(transport: str = "grpc"): - client = ApiHubPluginClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options=options, + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert response is None + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = ApiHubPluginClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubPluginRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -def test_delete_operation_field_headers(): - client = ApiHubPluginClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubPluginClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) + assert transport.kind == transport_name - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_api_hub_plugin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubPluginTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_api_hub_plugin_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubPluginTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_plugin", + "enable_plugin", + "disable_plugin", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" + with pytest.raises(NotImplementedError): + transport.close() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + +def test_api_hub_plugin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubPluginTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) -def test_delete_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_plugin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubPluginTransport() + adc.assert_called_once() + + +def test_api_hub_plugin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubPluginClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_plugin_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubPluginRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_host_no_port(transport_name): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com/" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_host_with_port(transport_name): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000/" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubPluginClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ApiHubPluginClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_plugin._session + session2 = client2.transport.get_plugin._session + assert session1 != session2 + session1 = client1.transport.enable_plugin._session + session2 = client2.transport.enable_plugin._session + assert session1 != session2 + session1 = client1.transport.disable_plugin._session + session2 = client2.transport.disable_plugin._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "squid" + location = "clam" + attribute = "whelk" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubPluginClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", + } + path = ApiHubPluginClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_plugin_path(): + project = "cuttlefish" + location = "mussel" + plugin = "winkle" + expected = "projects/{project}/locations/{location}/plugins/{plugin}".format( + project=project, + location=location, + plugin=plugin, ) + actual = ApiHubPluginClient.plugin_path(project, location, plugin) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_plugin_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "plugin": "abalone", + } + path = ApiHubPluginClient.plugin_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_plugin_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ApiHubPluginClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ApiHubPluginClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubPluginClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ApiHubPluginClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubPluginClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ApiHubPluginClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubPluginClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ApiHubPluginClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubPluginClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ApiHubPluginClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ApiHubPluginTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubPluginTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubPluginClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ApiHubPluginClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubPluginAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5104,7 +2666,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubPluginClient( @@ -5121,8 +2682,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport), - (ApiHubPluginAsyncClient, transports.ApiHubPluginGrpcAsyncIOTransport), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py index e951e616c819..15813aa93505 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.host_project_registration_service import ( - HostProjectRegistrationServiceAsyncClient, HostProjectRegistrationServiceClient, pagers, transports, @@ -230,11 +229,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -333,11 +327,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -421,8 +410,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (HostProjectRegistrationServiceClient, "grpc"), - (HostProjectRegistrationServiceAsyncClient, "grpc_asyncio"), (HostProjectRegistrationServiceClient, "rest"), ], ) @@ -449,8 +436,6 @@ def test_host_project_registration_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.HostProjectRegistrationServiceGrpcTransport, "grpc"), - (transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), (transports.HostProjectRegistrationServiceRestTransport, "rest"), ], ) @@ -475,8 +460,6 @@ def test_host_project_registration_service_client_service_account_always_use_jwt @pytest.mark.parametrize( "client_class,transport_name", [ - (HostProjectRegistrationServiceClient, "grpc"), - (HostProjectRegistrationServiceAsyncClient, "grpc_asyncio"), (HostProjectRegistrationServiceClient, "rest"), ], ) @@ -510,28 +493,17 @@ def test_host_project_registration_service_client_from_service_account_file( def test_host_project_registration_service_client_get_transport_class(): transport = HostProjectRegistrationServiceClient.get_transport_class() available_transports = [ - 
transports.HostProjectRegistrationServiceGrpcTransport, transports.HostProjectRegistrationServiceRestTransport, ] assert transport in available_transports - transport = HostProjectRegistrationServiceClient.get_transport_class("grpc") - assert transport == transports.HostProjectRegistrationServiceGrpcTransport + transport = HostProjectRegistrationServiceClient.get_transport_class("rest") + assert transport == transports.HostProjectRegistrationServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -544,11 +516,6 @@ def test_host_project_registration_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_client_options( client_class, transport_class, transport_name ): @@ -686,30 +653,6 @@ def test_host_project_registration_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - "true", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - "false", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -729,11 +672,6 @@ def test_host_project_registration_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_host_project_registration_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -837,20 +775,12 @@ def test_host_project_registration_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", - [HostProjectRegistrationServiceClient, HostProjectRegistrationServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) @mock.patch.object( HostProjectRegistrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_get_mtls_endpoint_and_cert_source( client_class, ): @@ -944,20 +874,12 @@ def 
test_host_project_registration_service_client_get_mtls_endpoint_and_cert_sou ) -@pytest.mark.parametrize( - "client_class", - [HostProjectRegistrationServiceClient, HostProjectRegistrationServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) @mock.patch.object( HostProjectRegistrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -1038,16 +960,6 @@ def test_host_project_registration_service_client_client_api_endpoint(client_cla @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -1083,18 +995,6 @@ def test_host_project_registration_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -1127,96 +1027,6 @@ def test_host_project_registration_service_client_client_options_credentials_fil ) -def test_host_project_registration_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = HostProjectRegistrationServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_host_project_registration_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1224,34 +1034,114 @@ def test_host_project_registration_service_client_create_channel_credentials_fil dict, ], ) -def test_create_host_project_registration(request_type, transport: str = "grpc"): +def test_create_host_project_registration_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["host_project_registration"] = { + "name": "name_value", + "gcp_project": "gcp_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration( + # Determine if the message type is proto-plus or protobuf + test_field = host_project_registration_service.CreateHostProjectRegistrationRequest.meta.fields[ + "host_project_registration" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "host_project_registration" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["host_project_registration"][field]) + ): + del request_init["host_project_registration"][field][i][subfield] + else: + del request_init["host_project_registration"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration( name="name_value", gcp_project="gcp_project_value", ) - response = client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.CreateHostProjectRegistrationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_host_project_registration(request) # Establish that the response is the type that we expect. 
assert isinstance( @@ -1261,71 +1151,13 @@ def test_create_host_project_registration(request_type, transport: str = "grpc") assert response.gcp_project == "gcp_project_value" -def test_create_host_project_registration_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - - -def test_create_host_project_registration_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_host_project_registration(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - ) - - -def test_create_host_project_registration_use_cached_wrapped_rpc(): +def test_create_host_project_registration_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1346,6 +1178,7 @@ def test_create_host_project_registration_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_host_project_registration ] = mock_rpc + request = {} client.create_host_project_registration(request) @@ -1359,231 +1192,268 @@ def test_create_host_project_registration_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_host_project_registration_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["host_project_registration_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.create_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.CreateHostProjectRegistrationRequest() - ) + # verify fields with default values are dropped + assert "hostProjectRegistrationId" not in jsonified_request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_host_project_registration_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == request_init["host_project_registration_id"] + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "hostProjectRegistrationId" + ] = "host_project_registration_id_value" - # Ensure method has been cached - assert ( - client._client._transport.create_host_project_registration - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_host_project_registration._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("host_project_registration_id",)) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_host_project_registration - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == "host_project_registration_id_value" + ) - request = {} - await client.create_host_project_registration(request) + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - await client.create_host_project_registration(request) + response_value = Response() + response_value.status_code = 200 - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_create_host_project_registration_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + response = client.create_host_project_registration(request) + + expected_params = [ + ( + "hostProjectRegistrationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + unset_fields = ( + transport.create_host_project_registration._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("hostProjectRegistrationId",)) + & set( + ( + "parent", + "hostProjectRegistrationId", + "hostProjectRegistration", + ) + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), + ) + client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_create_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_create_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.CreateHostProjectRegistrationRequest.pb( + host_project_registration_service.CreateHostProjectRegistrationRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() ) ) - response = await client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] request = ( host_project_registration_service.CreateHostProjectRegistrationRequest() ) - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + client.create_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -@pytest.mark.asyncio -async def test_create_host_project_registration_async_from_dict(): - await test_create_host_project_registration_async(request_type=dict) + pre.assert_called_once() + post.assert_called_once() -def test_create_host_project_registration_field_headers(): +def test_create_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.CreateHostProjectRegistrationRequest() - - request.parent = "parent_value" + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -@pytest.mark.asyncio -async def test_create_host_project_registration_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_create_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.CreateHostProjectRegistrationRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - await client.create_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() -def test_create_host_project_registration_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_host_project_registration( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", host_project_registration=host_project_registration_service.HostProjectRegistration( name="name_value" ), host_project_registration_id="host_project_registration_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_host_project_registration(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].host_project_registration - mock_val = host_project_registration_service.HostProjectRegistration( - name="name_value" + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], ) - assert arg == mock_val - arg = args[0].host_project_registration_id - mock_val = "host_project_registration_id_value" - assert arg == mock_val -def test_create_host_project_registration_flattened_error(): +def test_create_host_project_registration_rest_flattened_error(transport: str = "rest"): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1599,67 +1469,11 @@ def test_create_host_project_registration_flattened_error(): ) -@pytest.mark.asyncio -async def test_create_host_project_registration_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_host_project_registration( - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].host_project_registration - mock_val = host_project_registration_service.HostProjectRegistration( - name="name_value" - ) - assert arg == mock_val - arg = args[0].host_project_registration_id - mock_val = "host_project_registration_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_host_project_registration_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_host_project_registration( - host_project_registration_service.CreateHostProjectRegistrationRequest(), - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - @pytest.mark.parametrize( "request_type", @@ -1668,32 +1482,38 @@ async def test_create_host_project_registration_flattened_error_async(): dict, ], ) -def test_get_host_project_registration(request_type, transport: str = "grpc"): +def test_get_host_project_registration_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = host_project_registration_service.HostProjectRegistration( name="name_value", gcp_project="gcp_project_value", ) - response = client.get_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = host_project_registration_service.GetHostProjectRegistrationRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_host_project_registration(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1703,69 +1523,13 @@ def test_get_host_project_registration(request_type, transport: str = "grpc"): assert response.gcp_project == "gcp_project_value" -def test_get_host_project_registration_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.GetHostProjectRegistrationRequest() - ) - - -def test_get_host_project_registration_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.GetHostProjectRegistrationRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_host_project_registration(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.GetHostProjectRegistrationRequest( - name="name_value", - ) - - -def test_get_host_project_registration_use_cached_wrapped_rpc(): +def test_get_host_project_registration_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1786,6 +1550,7 @@ def test_get_host_project_registration_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_host_project_registration ] = mock_rpc + request = {} client.get_host_project_registration(request) @@ -1799,274 +1564,250 @@ def test_get_host_project_registration_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_host_project_registration_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.get_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.GetHostProjectRegistrationRequest() - ) - - -@pytest.mark.asyncio -async def test_get_host_project_registration_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.HostProjectRegistrationServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_host_project_registration - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_host_project_registration - ] = 
mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_host_project_registration(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_host_project_registration(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_host_project_registration_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceAsyncClient( + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.get_host_project_registration(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = host_project_registration_service.GetHostProjectRegistrationRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_host_project_registration(request) -@pytest.mark.asyncio -async def test_get_host_project_registration_async_from_dict(): - await test_get_host_project_registration_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_host_project_registration_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.GetHostProjectRegistrationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value = host_project_registration_service.HostProjectRegistration() - client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_host_project_registration._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_host_project_registration_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.GetHostProjectRegistrationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_get_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_get_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.GetHostProjectRegistrationRequest.pb( + host_project_registration_service.GetHostProjectRegistrationRequest() + ) ) - await client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_host_project_registration_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_host_project_registration( - name="name_value", + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() + ) ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = host_project_registration_service.GetHostProjectRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + + client.get_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_get_host_project_registration_flattened_error(): +def test_get_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_host_project_registration( - host_project_registration_service.GetHostProjectRegistrationRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_host_project_registration(request) -@pytest.mark.asyncio -async def test_get_host_project_registration_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_host_project_registration( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_host_project_registration(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_host_project_registration_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_host_project_registration_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_host_project_registration( + client.get_host_project_registration( host_project_registration_service.GetHostProjectRegistrationRequest(), name="name_value", ) +def test_get_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2074,110 +1815,52 @@ async def test_get_host_project_registration_flattened_error_async(): dict, ], ) -def test_list_host_project_registrations(request_type, transport: str = "grpc"): +def test_list_host_project_registrations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse( next_page_token="next_page_token_value", ) ) - response = client.list_host_project_registrations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_host_project_registrations(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListHostProjectRegistrationsPager) assert response.next_page_token == "next_page_token_value" -def test_list_host_project_registrations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_host_project_registrations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - - -def test_list_host_project_registrations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.ListHostProjectRegistrationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_host_project_registrations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.ListHostProjectRegistrationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_host_project_registrations_use_cached_wrapped_rpc(): +def test_list_host_project_registrations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2198,6 +1881,7 @@ def test_list_host_project_registrations_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_host_project_registrations ] = mock_rpc + request = {} client.list_host_project_registrations(request) @@ -2211,289 +1895,279 @@ def test_list_host_project_registrations_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_host_project_registrations_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_host_project_registrations_rest_required_fields( + request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_host_project_registrations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.ListHostProjectRegistrationsRequest() - ) + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_host_project_registrations - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_host_project_registrations - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - request = {} - await client.list_host_project_registrations(request) + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Designate an appropriate value for the returned response. 
+ return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - await client.list_host_project_registrations(request) + response_value = Response() + response_value.status_code = 200 - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Convert return value to protobuf type + return_value = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_host_project_registrations_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + response = client.list_host_project_registrations(request) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListHostProjectRegistrationsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_from_dict(): - await test_list_host_project_registrations_async(request_type=dict) - - -def test_list_host_project_registrations_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_host_project_registrations_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.ListHostProjectRegistrationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() + unset_fields = transport.list_host_project_registrations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) - client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_host_project_registrations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_host_project_registrations_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.ListHostProjectRegistrationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_list_host_project_registrations", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_list_host_project_registrations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.ListHostProjectRegistrationsRequest.pb( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) ) - await client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } -def test_list_host_project_registrations_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + request = ( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.list_host_project_registrations( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_host_project_registrations_flattened_error(): +def test_list_host_project_registrations_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_host_project_registrations( - host_project_registration_service.ListHostProjectRegistrationsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_host_project_registrations(request) -@pytest.mark.asyncio -async def test_list_host_project_registrations_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_list_host_project_registrations_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse() ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_host_project_registrations( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_host_project_registrations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_host_project_registrations_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_list_host_project_registrations_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_host_project_registrations( + client.list_host_project_registrations( host_project_registration_service.ListHostProjectRegistrationsRequest(), parent="parent_value", ) -def test_list_host_project_registrations_pager(transport_name: str = "grpc"): +def test_list_host_project_registrations_rest_pager(transport: str = "rest"): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( host_project_registration_service.ListHostProjectRegistrationsResponse( host_project_registrations=[ host_project_registration_service.HostProjectRegistration(), @@ -2518,22 +2192,26 @@ def test_list_host_project_registrations_pager(transport_name: str = "grpc"): host_project_registration_service.HostProjectRegistration(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_host_project_registrations( - request={}, retry=retry, timeout=timeout + # Wrap the values into proper Response objs + response = tuple( + host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + x + ) + for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_host_project_registrations(request=sample_request) results = list(pager) assert len(results) == 6 @@ -2542,3300 +2220,791 @@ def test_list_host_project_registrations_pager(transport_name: str = "grpc"): for i in results ) - -def test_list_host_project_registrations_pages(transport_name: str = "grpc"): - client = 
HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, + pages = list( + client.list_host_project_registrations(request=sample_request).pages ) - pages = list(client.list_host_project_registrations(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_pager(): - client = HostProjectRegistrationServiceAsyncClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_host_project_registrations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, host_project_registration_service.HostProjectRegistration) - for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_pages(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_host_project_registrations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.CreateHostProjectRegistrationRequest, - dict, - ], -) -def test_create_host_project_registration_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["host_project_registration"] = { - "name": "name_value", - "gcp_project": "gcp_project_value", - "create_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = host_project_registration_service.CreateHostProjectRegistrationRequest.meta.fields[ - "host_project_registration" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "host_project_registration" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["host_project_registration"][field]) - ): - del request_init["host_project_registration"][field][i][subfield] - else: - del request_init["host_project_registration"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_host_project_registration(request) - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" - - -def test_create_host_project_registration_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_host_project_registration - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_host_project_registration - ] = mock_rpc - - request = {} - client.create_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_host_project_registration(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_host_project_registration_rest_required_fields( - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["host_project_registration_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "hostProjectRegistrationId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "hostProjectRegistrationId" in jsonified_request - assert ( - jsonified_request["hostProjectRegistrationId"] - == request_init["host_project_registration_id"] - ) - - jsonified_request["parent"] = "parent_value" - jsonified_request[ - "hostProjectRegistrationId" - ] = "host_project_registration_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_host_project_registration._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("host_project_registration_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "hostProjectRegistrationId" in jsonified_request - assert ( - jsonified_request["hostProjectRegistrationId"] - == "host_project_registration_id_value" - ) - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_host_project_registration(request) - - expected_params = [ - ( - "hostProjectRegistrationId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_host_project_registration_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.create_host_project_registration._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("hostProjectRegistrationId",)) - & set( - ( - "parent", - "hostProjectRegistrationId", - "hostProjectRegistration", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_host_project_registration_rest_interceptors(null_interceptor): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "post_create_host_project_registration", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_create_host_project_registration", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - 
host_project_registration_service.CreateHostProjectRegistrationRequest.pb( - host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - host_project_registration_service.HostProjectRegistration.to_json( - host_project_registration_service.HostProjectRegistration() - ) - ) - - request = ( - host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = host_project_registration_service.HostProjectRegistration() - - client.create_host_project_registration( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_host_project_registration_rest_bad_request( - transport: str = "rest", - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_host_project_registration(request) - - -def test_create_host_project_registration_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_host_project_registration(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" - % client.transport._host, - args[1], - ) - - -def test_create_host_project_registration_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_host_project_registration( - host_project_registration_service.CreateHostProjectRegistrationRequest(), - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - - -def test_create_host_project_registration_rest_error(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.GetHostProjectRegistrationRequest, - dict, - ], -) -def test_get_host_project_registration_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_host_project_registration(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" - - -def test_get_host_project_registration_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_host_project_registration - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.get_host_project_registration - ] = mock_rpc - - request = {} - client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_host_project_registration(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_host_project_registration_rest_required_fields( - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_host_project_registration(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_host_project_registration_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_host_project_registration._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_host_project_registration_rest_interceptors(null_interceptor): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "post_get_host_project_registration", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_get_host_project_registration", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.GetHostProjectRegistrationRequest.pb( - host_project_registration_service.GetHostProjectRegistrationRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - host_project_registration_service.HostProjectRegistration.to_json( - host_project_registration_service.HostProjectRegistration() - ) - ) - - request = host_project_registration_service.GetHostProjectRegistrationRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = host_project_registration_service.HostProjectRegistration() - - client.get_host_project_registration( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_host_project_registration_rest_bad_request( - transport: str = "rest", - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - request = 
request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_host_project_registration(request) - - -def test_get_host_project_registration_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_host_project_registration(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}" - % client.transport._host, - args[1], - ) - - -def test_get_host_project_registration_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_host_project_registration( - host_project_registration_service.GetHostProjectRegistrationRequest(), - name="name_value", - ) - - -def test_get_host_project_registration_rest_error(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.ListHostProjectRegistrationsRequest, - dict, - ], -) -def test_list_host_project_registrations_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_host_project_registrations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHostProjectRegistrationsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_host_project_registrations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_host_project_registrations - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_host_project_registrations - ] = mock_rpc - - request = {} - client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_host_project_registrations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_host_project_registrations_rest_required_fields( - request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_host_project_registrations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_host_project_registrations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_host_project_registrations(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_host_project_registrations_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_host_project_registrations._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_host_project_registrations_rest_interceptors(null_interceptor): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "post_list_host_project_registrations", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_list_host_project_registrations", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.ListHostProjectRegistrationsRequest.pb( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - 
- req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - client.list_host_project_registrations( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_host_project_registrations_rest_bad_request( - transport: str = "rest", - request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_host_project_registrations(request) - - -def test_list_host_project_registrations_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_host_project_registrations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" - % client.transport._host, - args[1], - ) - - -def test_list_host_project_registrations_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_host_project_registrations( - host_project_registration_service.ListHostProjectRegistrationsRequest(), - parent="parent_value", - ) - - -def test_list_host_project_registrations_rest_pager(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_host_project_registrations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, host_project_registration_service.HostProjectRegistration) - for i in results - ) - - pages = list( - client.list_host_project_registrations(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.HostProjectRegistrationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - transports.HostProjectRegistrationServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = HostProjectRegistrationServiceClient.get_transport_class( - transport_name - )( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.HostProjectRegistrationServiceGrpcTransport, - ) - - -def test_host_project_registration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.HostProjectRegistrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_host_project_registration_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.HostProjectRegistrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_host_project_registration", - "get_host_project_registration", - "list_host_project_registrations", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_host_project_registration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.HostProjectRegistrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_host_project_registration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.HostProjectRegistrationServiceTransport() - adc.assert_called_once() - - -def test_host_project_registration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - HostProjectRegistrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - transports.HostProjectRegistrationServiceRestTransport, - ], -) -def test_host_project_registration_service_transport_auth_gdch_credentials( - transport_class, -): - host = "https://language.com/" - api_audience_tests = [None, "https://language2.com/"] - api_audience_expect = [host, "https://language2.com/"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.HostProjectRegistrationServiceGrpcTransport, grpc_helpers), - ( - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - grpc_helpers_async, - ), - ], -) -def test_host_project_registration_service_transport_create_channel( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_host_project_registration_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.HostProjectRegistrationServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_host_project_registration_service_host_no_port(transport_name): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_host_project_registration_service_host_with_port(transport_name): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_host_project_registration_service_client_transport_session_collision( - transport_name, -): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = HostProjectRegistrationServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = HostProjectRegistrationServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_host_project_registration._session - session2 = client2.transport.create_host_project_registration._session - assert session1 != session2 - session1 = client1.transport.get_host_project_registration._session - session2 = client2.transport.get_host_project_registration._session - assert session1 != session2 - session1 = client1.transport.list_host_project_registrations._session - session2 = client2.transport.list_host_project_registrations._session - assert session1 != session2 - - -def test_host_project_registration_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.HostProjectRegistrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_host_project_registration_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.HostProjectRegistrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_channel_mtls_with_adc( - transport_class, -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_host_project_registration_path(): - project = "squid" - location = "clam" - host_project_registration = "whelk" - expected = "projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format( - project=project, - location=location, - host_project_registration=host_project_registration, - ) - actual = HostProjectRegistrationServiceClient.host_project_registration_path( - project, location, host_project_registration - ) - assert expected == actual - - -def test_parse_host_project_registration_path(): - expected = { - "project": "octopus", - "location": "oyster", - "host_project_registration": "nudibranch", - } - path = HostProjectRegistrationServiceClient.host_project_registration_path( - **expected - ) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_host_project_registration_path( - path - ) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = HostProjectRegistrationServiceClient.common_billing_account_path( - billing_account - ) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = HostProjectRegistrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_billing_account_path( - path - ) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = HostProjectRegistrationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = HostProjectRegistrationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = HostProjectRegistrationServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = HostProjectRegistrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = HostProjectRegistrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = HostProjectRegistrationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = HostProjectRegistrationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = HostProjectRegistrationServiceClient.common_location_path( - project, location - ) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = HostProjectRegistrationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = HostProjectRegistrationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = HostProjectRegistrationServiceClient( + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # It is an error to provide an api_key and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = HostProjectRegistrationServiceClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = HostProjectRegistrationServiceClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.HostProjectRegistrationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = HostProjectRegistrationServiceClient.get_transport_class( + transport_name + )( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_host_project_registration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_host_project_registration_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_host_project_registration", + "get_host_project_registration", + "list_host_project_registrations", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=locations",
-    ) in kw["metadata"]
+    with pytest.raises(NotImplementedError):
+        transport.close()
+    # Catch all for all remaining methods and properties
+    remainder = [
+        "kind",
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
-@pytest.mark.asyncio
-async def test_delete_operation_field_headers_async():
-    client = HostProjectRegistrationServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.DeleteOperationRequest()
-    request.name = "locations"
+def test_host_project_registration_service_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.HostProjectRegistrationServiceTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=locations",
-    ) in kw["metadata"]
+def test_host_project_registration_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+        "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.HostProjectRegistrationServiceTransport()
+        adc.assert_called_once()
-def test_delete_operation_from_dict():
-    client = HostProjectRegistrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-
-        response = client.delete_operation(
-            request={
-                "name": "locations",
-            }
+def test_host_project_registration_service_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        HostProjectRegistrationServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
         )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_delete_operation_from_dict_async():
-    client = HostProjectRegistrationServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_operation(
-            request={
-                "name": "locations",
-            }
+def test_host_project_registration_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.HostProjectRegistrationServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
         )
-        call.assert_called()
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-def test_cancel_operation(transport: str = "grpc"):
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_host_project_registration_service_host_no_port(transport_name):
     client = HostProjectRegistrationServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
+        client_options=client_options.ClientOptions(
+            api_endpoint="apihub.googleapis.com"
+        ),
+        transport=transport_name,
     )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.CancelOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.cancel_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
-    client = HostProjectRegistrationServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
+    assert client.transport._host == (
+        "apihub.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://apihub.googleapis.com/"
     )
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.CancelOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.cancel_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-def test_cancel_operation_field_headers():
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_host_project_registration_service_host_with_port(transport_name):
     client = HostProjectRegistrationServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="apihub.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "apihub.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://apihub.googleapis.com:8000/"
     )
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.CancelOperationRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        call.return_value = None
-
-        client.cancel_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=locations",
-    ) in kw["metadata"]
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_host_project_registration_service_client_transport_session_collision(
+    transport_name,
+):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = HostProjectRegistrationServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = HostProjectRegistrationServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_host_project_registration._session
+    session2 = client2.transport.create_host_project_registration._session
+    assert session1 != session2
+    session1 = client1.transport.get_host_project_registration._session
+    session2 = client2.transport.get_host_project_registration._session
+    assert session1 != session2
+    session1 = client1.transport.list_host_project_registrations._session
+    session2 = client2.transport.list_host_project_registrations._session
+    assert session1 != session2
-@pytest.mark.asyncio
-async def test_cancel_operation_field_headers_async():
-    client = HostProjectRegistrationServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+def test_host_project_registration_path():
+    project = "squid"
+    location = "clam"
+    host_project_registration = "whelk"
+    expected = "projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format(
+        project=project,
+        location=location,
+        host_project_registration=host_project_registration,
+    )
+    actual = HostProjectRegistrationServiceClient.host_project_registration_path(
+        project, location, host_project_registration
     )
+    assert expected == actual
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_host_project_registration_path(): + expected = { + "project": "octopus", + "location": "oyster", + "host_project_registration": "nudibranch", + } + path = HostProjectRegistrationServiceClient.host_project_registration_path( + **expected + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_host_project_registration_path( + path + ) + assert expected == actual -def test_cancel_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = HostProjectRegistrationServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = HostProjectRegistrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_billing_account_path( + path ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = HostProjectRegistrationServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = HostProjectRegistrationServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = HostProjectRegistrationServiceClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = HostProjectRegistrationServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = HostProjectRegistrationServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = HostProjectRegistrationServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = HostProjectRegistrationServiceClient.common_location_path( + project, location + ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = HostProjectRegistrationServiceClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = HostProjectRegistrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5853,7 +3022,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = HostProjectRegistrationServiceClient( @@ -5872,11 +3040,7 @@ def test_client_ctx(): [ ( HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, + transports.HostProjectRegistrationServiceRestTransport, ), ], ) diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py index 947a54eb86f7..db139191d3f8 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.linting_service import ( - LintingServiceAsyncClient, LintingServiceClient, transports, ) @@ -211,11 +210,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -300,7 +294,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -380,8 +373,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (LintingServiceClient, "grpc"), - (LintingServiceAsyncClient, "grpc_asyncio"), (LintingServiceClient, "rest"), ], ) @@ -406,8 +397,6 @@ def test_linting_service_client_from_service_account_info(client_class, transpor @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.LintingServiceGrpcTransport, "grpc"), - (transports.LintingServiceGrpcAsyncIOTransport, "grpc_asyncio"), (transports.LintingServiceRestTransport, "rest"), ], ) @@ -432,8 +421,6 @@ def test_linting_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (LintingServiceClient, "grpc"), - (LintingServiceAsyncClient, "grpc_asyncio"), (LintingServiceClient, "rest"), ], ) @@ -465,24 +452,17 @@ def test_linting_service_client_from_service_account_file(client_class, transpor def test_linting_service_client_get_transport_class(): transport = LintingServiceClient.get_transport_class() available_transports = [ - transports.LintingServiceGrpcTransport, transports.LintingServiceRestTransport, ] assert transport in available_transports - transport = LintingServiceClient.get_transport_class("grpc") - assert transport == transports.LintingServiceGrpcTransport + transport = LintingServiceClient.get_transport_class("rest") + assert transport == transports.LintingServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LintingServiceClient, 
transports.LintingServiceGrpcTransport, "grpc"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -491,11 +471,6 @@ def test_linting_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) def test_linting_service_client_client_options( client_class, transport_class, transport_name ): @@ -629,20 +604,6 @@ def test_linting_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc", "true"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc", "false"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "true"), (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "false"), ], @@ -652,11 +613,6 @@ def test_linting_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_linting_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -760,19 +716,12 @@ def test_linting_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", [LintingServiceClient, LintingServiceAsyncClient] -) +@pytest.mark.parametrize("client_class", [LintingServiceClient]) @mock.patch.object( LintingServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LintingServiceAsyncClient), -) def test_linting_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -864,19 +813,12 @@ def test_linting_service_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize( - "client_class", [LintingServiceClient, LintingServiceAsyncClient] -) +@pytest.mark.parametrize("client_class", [LintingServiceClient]) @mock.patch.object( LintingServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) def test_linting_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -953,12 +895,6 @@ def test_linting_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), 
], ) @@ -990,18 +926,6 @@ def test_linting_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - LintingServiceClient, - transports.LintingServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest", None), ], ) @@ -1029,96 +953,6 @@ def test_linting_service_client_client_options_credentials_file( ) -def test_linting_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = LintingServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - LintingServiceClient, - transports.LintingServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_linting_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1126,30 +960,36 @@ def test_linting_service_client_create_channel_credentials_file( dict, ], ) -def test_get_style_guide(request_type, transport: str = "grpc"): +def test_get_style_guide_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide( name="name_value", linter=common_fields.Linter.SPECTRAL, ) - response = client.get_style_guide(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide(request) # Establish that the response is the type that we expect. assert isinstance(response, linting_service.StyleGuide) @@ -1157,60 +997,13 @@ def test_get_style_guide(request_type, transport: str = "grpc"): assert response.linter == common_fields.Linter.SPECTRAL -def test_get_style_guide_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest() - - -def test_get_style_guide_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.GetStyleGuideRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_style_guide(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest( - name="name_value", - ) - - -def test_get_style_guide_use_cached_wrapped_rpc(): +def test_get_style_guide_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1226,6 +1019,7 @@ def test_get_style_guide_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_style_guide] = mock_rpc + request = {} client.get_style_guide(request) @@ -1239,256 +1033,237 @@ def test_get_style_guide_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_style_guide_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.get_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest() - - -@pytest.mark.asyncio -async def test_get_style_guide_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_style_guide_rest_required_fields( + request_type=linting_service.GetStyleGuideRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_style_guide - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_style_guide - ] = mock_rpc - - request = {} - await client.get_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_style_guide(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.LintingServiceRestTransport -@pytest.mark.asyncio -async def test_get_style_guide_async( - transport: str = "grpc_asyncio", request_type=linting_service.GetStyleGuideRequest -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.get_style_guide(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_get_style_guide_async_from_dict(): - await test_get_style_guide_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_get_style_guide_field_headers(): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value = linting_service.StyleGuide() - client.get_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_style_guide_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideRequest() + response = client.get_style_guide(request) - request.name = "name_value" + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - await client.get_style_guide(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_get_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_style_guide_flattened(): - client = LintingServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_style_guide( - name="name_value", + client = LintingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.GetStyleGuideRequest.pb( + linting_service.GetStyleGuideRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = linting_service.GetStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + + client.get_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_get_style_guide_flattened_error(): +def test_get_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_style_guide( - linting_service.GetStyleGuideRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide(request) -@pytest.mark.asyncio -async def test_get_style_guide_flattened_async(): - client = LintingServiceAsyncClient( + +def test_get_style_guide_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_style_guide( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_style_guide(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_style_guide_flattened_error_async(): - client = LintingServiceAsyncClient( +def test_get_style_guide_rest_flattened_error(transport: str = "rest"): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_style_guide( + client.get_style_guide( linting_service.GetStyleGuideRequest(), name="name_value", ) +def test_get_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1496,93 +1271,124 @@ async def test_get_style_guide_flattened_error_async(): dict, ], ) -def test_update_style_guide(request_type, transport: str = "grpc"): +def test_update_style_guide_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request_init["style_guide"] = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide", + "linter": 1, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - response = client.update_style_guide(request) + # Determine if the message type is proto-plus or protobuf + test_field = linting_service.UpdateStyleGuideRequest.meta.fields["style_guide"] - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.UpdateStyleGuideRequest() - assert args[0] == request + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_update_style_guide_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["style_guide"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -def test_update_style_guide_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.UpdateStyleGuideRequest() + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["style_guide"][field])): + del request_init["style_guide"][field][i][subfield] + else: + del request_init["style_guide"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = linting_service.StyleGuide( + name="name_value", + linter=common_fields.Linter.SPECTRAL, ) - client.update_style_guide(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_style_guide(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, linting_service.StyleGuide) + assert response.name == "name_value" + assert response.linter == common_fields.Linter.SPECTRAL -def test_update_style_guide_use_cached_wrapped_rpc(): +def test_update_style_guide_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1602,6 +1408,7 @@ def test_update_style_guide_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_style_guide ] = mock_rpc + request = {} client.update_style_guide(request) @@ -1615,216 +1422,223 @@ def test_update_style_guide_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_style_guide_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_update_style_guide_rest_required_fields( + request_type=linting_service.UpdateStyleGuideRequest, +): + transport_class = transports.LintingServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.update_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_update_style_guide_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) - # Ensure method has been cached - assert ( - client._client._transport.update_style_guide - in client._client._transport._wrapped_methods - ) + # verify required fields with non-default values are left alone - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_style_guide - ] = mock_rpc + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - request = {} - await client.update_style_guide(request) + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + response_value = Response() + response_value.status_code = 200 - await client.update_style_guide(request) + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_style_guide(request) -@pytest.mark.asyncio -async def test_update_style_guide_async( - transport: str = "grpc_asyncio", - request_type=linting_service.UpdateStyleGuideRequest, -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + unset_fields = transport.update_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("styleGuide",))) - # Mock the actual call within the gRPC stub, and fake the request. + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), + ) + client = LintingServiceClient(transport=transport) with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_update_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_update_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.UpdateStyleGuideRequest.pb( + linting_service.UpdateStyleGuideRequest() ) - response = await client.update_style_guide(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.UpdateStyleGuideRequest() - assert args[0] == request + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + request = linting_service.UpdateStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + client.update_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -@pytest.mark.asyncio -async def test_update_style_guide_async_from_dict(): - await test_update_style_guide_async(request_type=dict) + pre.assert_called_once() + post.assert_called_once() -def test_update_style_guide_field_headers(): +def test_update_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.UpdateStyleGuideRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.UpdateStyleGuideRequest() - - request.style_guide.name = "name_value" + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value client.update_style_guide(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "style_guide.name=name_value", - ) in kw["metadata"] - -@pytest.mark.asyncio -async def test_update_style_guide_field_headers_async(): - client = LintingServiceAsyncClient( +def test_update_style_guide_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.UpdateStyleGuideRequest() - - request.style_guide.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - await client.update_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "style_guide.name=name_value", - ) in kw["metadata"] - + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() -def test_update_style_guide_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_style_guide( + # get truthy value for each flattened field + mock_args = dict( style_guide=linting_service.StyleGuide(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_style_guide(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].style_guide - mock_val = linting_service.StyleGuide(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) -def test_update_style_guide_flattened_error(): +def test_update_style_guide_rest_flattened_error(transport: str = "rest"): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1837,56 +1651,11 @@ def test_update_style_guide_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_style_guide_flattened_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_style_guide( - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].style_guide - mock_val = linting_service.StyleGuide(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_style_guide_flattened_error_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_style_guide( - linting_service.UpdateStyleGuideRequest(), - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - @pytest.mark.parametrize( "request_type", @@ -1895,97 +1664,50 @@ async def test_update_style_guide_flattened_error_async(): dict, ], ) -def test_get_style_guide_contents(request_type, transport: str = "grpc"): +def test_get_style_guide_contents_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuideContents( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents( contents=b"contents_blob", mime_type="mime_type_value", ) - response = client.get_style_guide_contents(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideContentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide_contents(request) + + # Establish that the response is the type that we expect. assert isinstance(response, linting_service.StyleGuideContents) assert response.contents == b"contents_blob" assert response.mime_type == "mime_type_value" -def test_get_style_guide_contents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_style_guide_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest() - - -def test_get_style_guide_contents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.GetStyleGuideContentsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_style_guide_contents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest( - name="name_value", - ) - - -def test_get_style_guide_contents_use_cached_wrapped_rpc(): +def test_get_style_guide_contents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2006,6 +1728,7 @@ def test_get_style_guide_contents_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_style_guide_contents ] = mock_rpc + request = {} client.get_style_guide_contents(request) @@ -2019,544 +1742,245 @@ def test_get_style_guide_contents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_style_guide_contents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_style_guide_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest() - - -@pytest.mark.asyncio -async def test_get_style_guide_contents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_style_guide_contents_rest_required_fields( + request_type=linting_service.GetStyleGuideContentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.LintingServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_style_guide_contents - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_style_guide_contents - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_style_guide_contents(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_style_guide_contents(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_style_guide_contents_async( - transport: str = "grpc_asyncio", - request_type=linting_service.GetStyleGuideContentsRequest, -): - client = LintingServiceAsyncClient( + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_style_guide_contents(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideContentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuideContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide_contents(request) -@pytest.mark.asyncio -async def test_get_style_guide_contents_async_from_dict(): - await test_get_style_guide_contents_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_style_guide_contents_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_style_guide_contents_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value = linting_service.StyleGuideContents() - client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_style_guide_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_style_guide_contents_field_headers_async(): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_contents_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = LintingServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.GetStyleGuideContentsRequest.pb( + linting_service.GetStyleGuideContentsRequest() ) - await client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuideContents.to_json( + linting_service.StyleGuideContents() + ) -def test_get_style_guide_contents_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = linting_service.GetStyleGuideContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuideContents() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuideContents() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_style_guide_contents( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_style_guide_contents_flattened_error(): +def test_get_style_guide_contents_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideContentsRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide_contents(request) -@pytest.mark.asyncio -async def test_get_style_guide_contents_flattened_async(): - client = LintingServiceAsyncClient( + +def test_get_style_guide_contents_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuideContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_style_guide_contents( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_style_guide_contents_flattened_error_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.get_style_guide_contents(**mock_args) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents" + % client.transport._host, + args[1], ) -@pytest.mark.parametrize( - "request_type", - [ - linting_service.LintSpecRequest, - dict, - ], -) -def test_lint_spec(request_type, transport: str = "grpc"): +def test_get_style_guide_contents_rest_flattened_error(transport: str = "rest"): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.LintSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_lint_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lint_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest() - - -def test_lint_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.LintSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lint_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest( + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_style_guide_contents( + linting_service.GetStyleGuideContentsRequest(), name="name_value", ) -def test_lint_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lint_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc - request = {} - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lint_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.lint_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest() - - -@pytest.mark.asyncio -async def test_lint_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.lint_spec - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lint_spec - ] = mock_rpc - - request = {} - await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lint_spec_async( - transport: str = "grpc_asyncio", request_type=linting_service.LintSpecRequest -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.LintSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_lint_spec_async_from_dict(): - await test_lint_spec_async(request_type=dict) - - -def test_lint_spec_field_headers(): +def test_get_style_guide_contents_rest_error(): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.LintSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value = None - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_lint_spec_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.LintSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - @pytest.mark.parametrize( "request_type", [ - linting_service.GetStyleGuideRequest, + linting_service.LintSpecRequest, dict, ], ) -def test_get_style_guide_rest(request_type): +def test_lint_spec_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2564,36 +1988,29 @@ def test_get_style_guide_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_style_guide(request) + response = client.lint_spec(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + assert response is None -def test_get_style_guide_rest_use_cached_wrapped_rpc(): +def test_lint_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2607,31 +2024,29 @@ def test_get_style_guide_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_style_guide in client._transport._wrapped_methods + assert client._transport.lint_spec in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_style_guide] = mock_rpc + client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc request = {} - client.get_style_guide(request) + client.lint_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_style_guide(request) + client.lint_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_style_guide_rest_required_fields( - request_type=linting_service.GetStyleGuideRequest, -): +def test_lint_spec_rest_required_fields(request_type=linting_service.LintSpecRequest): transport_class = transports.LintingServiceRestTransport request_init = {} @@ -2646,7 +2061,7 @@ def test_get_style_guide_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide._get_unset_required_fields(jsonified_request) + ).lint_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2655,7 +2070,7 @@ def test_get_style_guide_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide._get_unset_required_fields(jsonified_request) + ).lint_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2669,7 +2084,7 @@ def test_get_style_guide_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2681,39 +2096,37 @@ def test_get_style_guide_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_style_guide(request) + response = client.lint_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_style_guide_rest_unset_required_fields(): +def test_lint_spec_rest_unset_required_fields(): transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_style_guide._get_unset_required_fields({}) + unset_fields = transport.lint_spec._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_style_guide_rest_interceptors(null_interceptor): +def test_lint_spec_rest_interceptors(null_interceptor): transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2726,14 +2139,11 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_get_style_guide" - ) as post, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_get_style_guide" + transports.LintingServiceRestInterceptor, "pre_lint_spec" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.GetStyleGuideRequest.pb( - linting_service.GetStyleGuideRequest() + pb_message = linting_service.LintSpecRequest.pb( + linting_service.LintSpecRequest() ) transcode.return_value = { "method": "post", @@ -2745,19 +2155,15 @@ def test_get_style_guide_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuide.to_json( - linting_service.StyleGuide() - ) - request = linting_service.GetStyleGuideRequest() + request = linting_service.LintSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = linting_service.StyleGuide() - client.get_style_guide( + client.lint_spec( request, metadata=[ ("key", "val"), @@ -2766,11 +2172,10 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_style_guide_rest_bad_request( - transport: str = "rest", request_type=linting_service.GetStyleGuideRequest +def test_lint_spec_rest_bad_request( + transport: str = "rest", request_type=linting_service.LintSpecRequest ): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2779,7 +2184,7 @@ def test_get_style_guide_rest_bad_request( # send a request 
that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" } request = request_type(**request_init) @@ -2792,2969 +2197,815 @@ def test_get_style_guide_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_style_guide(request) + client.lint_spec(request) -def test_get_style_guide_rest_flattened(): +def test_lint_spec_rest_error(): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_style_guide(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}" - % client.transport._host, - args[1], - ) - -def test_get_style_guide_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - client.get_style_guide( - linting_service.GetStyleGuideRequest(), - name="name_value", + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - -def test_get_style_guide_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.UpdateStyleGuideRequest, - dict, - ], -) -def test_update_style_guide_rest(request_type): - client = LintingServiceClient( + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - request_init["style_guide"] = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide", - "linter": 1, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = linting_service.UpdateStyleGuideRequest.meta.fields["style_guide"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["style_guide"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["style_guide"][field])): - del request_init["style_guide"][field][i][subfield] - else: - del request_init["style_guide"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_style_guide(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL - - -def test_update_style_guide_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + with pytest.raises(ValueError): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_style_guide in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - client._transport._wrapped_methods[ - client._transport.update_style_guide - ] = mock_rpc - - request = {} - client.update_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_style_guide(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_style_guide_rest_required_fields( - request_type=linting_service.UpdateStyleGuideRequest, -): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_style_guide._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_style_guide._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - client = LintingServiceClient( + # It is an error to provide an api_key and a transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuide() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_style_guide(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_style_guide_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_style_guide._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("styleGuide",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_style_guide_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_update_style_guide" - ) as post, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_update_style_guide" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.UpdateStyleGuideRequest.pb( - linting_service.UpdateStyleGuideRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuide.to_json( - linting_service.StyleGuide() - ) - - request = linting_service.UpdateStyleGuideRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = linting_service.StyleGuide() - - client.update_style_guide( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_style_guide_rest_bad_request( - transport: str = "rest", request_type=linting_service.UpdateStyleGuideRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { 
- "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_style_guide(request) - - -def test_update_style_guide_rest_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() - - # get arguments that satisfy an http rule for this method - sample_request = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - - # get truthy value for each flattened field - mock_args = dict( - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_style_guide(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}" - % client.transport._host, - args[1], - ) - - -def test_update_style_guide_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. + options = client_options.ClientOptions() + options.api_key = "api_key" with pytest.raises(ValueError): - client.update_style_guide( - linting_service.UpdateStyleGuideRequest(), - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_style_guide_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.GetStyleGuideContentsRequest, - dict, - ], -) -def test_get_style_guide_contents_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_style_guide_contents(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuideContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" - - -def test_get_style_guide_contents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_style_guide_contents - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_style_guide_contents - ] = mock_rpc - - request = {} - client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_style_guide_contents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_style_guide_contents_rest_required_fields( - request_type=linting_service.GetStyleGuideContentsRequest, -): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuideContents() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_style_guide_contents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_style_guide_contents_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_style_guide_contents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_style_guide_contents_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" - ) as post, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.GetStyleGuideContentsRequest.pb( - linting_service.GetStyleGuideContentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuideContents.to_json( - linting_service.StyleGuideContents() - ) - - request = linting_service.GetStyleGuideContentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = linting_service.StyleGuideContents() - - client.get_style_guide_contents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_style_guide_contents_rest_bad_request( - transport: str = "rest", request_type=linting_service.GetStyleGuideContentsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } 
- request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_style_guide_contents(request) - - -def test_get_style_guide_contents_rest_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuideContents() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_style_guide_contents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents" - % client.transport._host, - args[1], - ) - - -def test_get_style_guide_contents_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", - ) - - -def test_get_style_guide_contents_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.LintSpecRequest, - dict, - ], -) -def test_lint_spec_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lint_spec(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_lint_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lint_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc - - request = {} - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lint_spec_rest_required_fields(request_type=linting_service.LintSpecRequest): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lint_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lint_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lint_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lint_spec_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.lint_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lint_spec_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_lint_spec" - ) as pre: - pre.assert_not_called() - pb_message = linting_service.LintSpecRequest.pb( - linting_service.LintSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = linting_service.LintSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.lint_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_lint_spec_rest_bad_request( - transport: str = "rest", request_type=linting_service.LintSpecRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lint_spec(request) - - -def test_lint_spec_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LintingServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LintingServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - transports.LintingServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = LintingServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LintingServiceGrpcTransport, - ) - - -def test_linting_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LintingServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_linting_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.LintingServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "get_style_guide", - "update_style_guide", - "get_style_guide_contents", - "lint_spec", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_linting_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LintingServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_linting_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LintingServiceTransport() - adc.assert_called_once() - - -def test_linting_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LintingServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - transports.LintingServiceRestTransport, - ], -) -def test_linting_service_transport_auth_gdch_credentials(transport_class): - host = "https://language.com/" - api_audience_tests = [None, "https://language2.com/"] - api_audience_expect = [host, "https://language2.com/"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LintingServiceGrpcTransport, grpc_helpers), - (transports.LintingServiceGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_linting_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_linting_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.LintingServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_linting_service_host_no_port(transport_name): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_linting_service_host_with_port(transport_name): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_linting_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LintingServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LintingServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_style_guide._session - session2 = client2.transport.get_style_guide._session - assert session1 != session2 - session1 = client1.transport.update_style_guide._session - session2 = client2.transport.update_style_guide._session - assert session1 != session2 - session1 = client1.transport.get_style_guide_contents._session - session2 = client2.transport.get_style_guide_contents._session - assert session1 != session2 - session1 = client1.transport.lint_spec._session - session2 = client2.transport.lint_spec._session
- assert session1 != session2 - - -def test_linting_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LintingServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_linting_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LintingServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
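# A minimal standalone sketch of the channel-injection pattern exercised by
# test_linting_service_grpc_transport_channel above; it assumes the
# google-cloud-apihub package laid out in this patch and grpcio are installed,
# and is illustrative only rather than part of the generated test module.
import grpc

from google.cloud.apihub_v1.services.linting_service import transports

# Hand a throwaway local channel straight to the transport constructor.
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
transport = transports.LintingServiceGrpcTransport(
    host="squid.clam.whelk",
    channel=channel,
)

# The injected channel is reused verbatim and the host gains the default port.
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"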
-@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_spec_path(): - project = "squid" - location = "clam" - api = "whelk" - version = "octopus" - spec = "oyster" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( - project=project, - location=location, - api=api, - version=version, - spec=spec, - ) - actual = LintingServiceClient.spec_path(project, location, api, version, spec) - assert expected == actual - - -def test_parse_spec_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "api": "mussel", - "version": "winkle", - "spec": "nautilus", - } - path = LintingServiceClient.spec_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_spec_path(path) - assert expected == actual - - -def test_style_guide_path(): - project = "scallop" - location = "abalone" - plugin = "squid" - expected = ( - "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( - project=project, - location=location, - plugin=plugin, - ) - ) - actual = LintingServiceClient.style_guide_path(project, location, plugin) - assert expected == actual - - -def test_parse_style_guide_path(): - expected = { - "project": "clam", - "location": "whelk", - "plugin": "octopus", - } - path = LintingServiceClient.style_guide_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_style_guide_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = LintingServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = LintingServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = LintingServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = LintingServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = LintingServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = LintingServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format( - project=project, - ) - actual = LintingServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = LintingServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = LintingServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = LintingServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.LintingServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.LintingServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = LintingServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = LintingServiceClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = LintingServiceClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.LintingServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LintingServiceClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_linting_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_linting_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_style_guide", + "update_style_guide", + "get_style_guide_contents", + "lint_spec", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + with pytest.raises(NotImplementedError): + transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" +def test_linting_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_linting_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_linting_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LintingServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_linting_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LintingServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_no_port(transport_name): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com/" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_with_port(transport_name): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000/" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LintingServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LintingServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_style_guide._session + session2 = client2.transport.get_style_guide._session + assert session1 != session2 + session1 = client1.transport.update_style_guide._session + session2 = client2.transport.update_style_guide._session + assert session1 != session2 + session1 = client1.transport.get_style_guide_contents._session + session2 = client2.transport.get_style_guide_contents._session + assert session1 != session2 + session1 = client1.transport.lint_spec._session + session2 = client2.transport.lint_spec._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_spec_path(): + project = "squid" + location = "clam" + api = "whelk" + version = "octopus" + spec = "oyster" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, ) + actual = LintingServiceClient.spec_path(project, location, api, version, spec) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_spec_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "api": "mussel", + "version": "winkle", + "spec": "nautilus", + } + path = LintingServiceClient.spec_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_spec_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_style_guide_path(): + project = "scallop" + location = "abalone" + plugin = "squid" + expected = ( + "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( + project=project, + location=location, + plugin=plugin, + ) ) + actual = LintingServiceClient.style_guide_path(project, location, plugin) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_style_guide_path(): + expected = { + "project": "clam", + "location": "whelk", + "plugin": "octopus", + } + path = LintingServiceClient.style_guide_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_style_guide_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = LintingServiceClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LintingServiceClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, ) + actual = LintingServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LintingServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = LintingServiceClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LintingServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, ) + actual = LintingServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LintingServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = LintingServiceClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LintingServiceClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = LintingServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = LintingServiceClient(credentials=ga_credentials.AnonymousCredentials()) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5772,7 +3023,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = LintingServiceClient( @@ -5789,8 +3039,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport), - (LintingServiceAsyncClient, transports.LintingServiceGrpcAsyncIOTransport), + (LintingServiceClient, transports.LintingServiceRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py index f3cf6227c307..06f68007eef1 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py @@ -55,11 +55,7 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.cloud.apihub_v1.services.provisioning import ( - ProvisioningAsyncClient, - ProvisioningClient, - transports, -) +from google.cloud.apihub_v1.services.provisioning import ProvisioningClient, transports from google.cloud.apihub_v1.types import common_fields, provisioning_service @@ -205,11 +201,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -294,7 +285,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -374,8 +364,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ProvisioningClient, "grpc"), - (ProvisioningAsyncClient, "grpc_asyncio"), (ProvisioningClient, "rest"), ], ) @@ -400,8 +388,6 @@ def test_provisioning_client_from_service_account_info(client_class, transport_n @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ProvisioningGrpcTransport, "grpc"), - (transports.ProvisioningGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ProvisioningRestTransport, "rest"), ], ) @@ -426,8 +412,6 @@ def test_provisioning_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ProvisioningClient, "grpc"), - (ProvisioningAsyncClient, "grpc_asyncio"), (ProvisioningClient, "rest"), ], ) @@ -459,24 +443,17 @@ def test_provisioning_client_from_service_account_file(client_class, transport_n def test_provisioning_client_get_transport_class(): transport = ProvisioningClient.get_transport_class() available_transports = [ - transports.ProvisioningGrpcTransport, transports.ProvisioningRestTransport, ] assert transport in available_transports - transport = ProvisioningClient.get_transport_class("grpc") - assert transport == transports.ProvisioningGrpcTransport + transport = 
ProvisioningClient.get_transport_class("rest") + assert transport == transports.ProvisioningRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -485,11 +462,6 @@ def test_provisioning_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test_provisioning_client_client_options( client_class, transport_class, transport_name ): @@ -623,20 +595,6 @@ def test_provisioning_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc", "true"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc", "false"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "true"), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "false"), ], @@ -646,11 +604,6 @@ def test_provisioning_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_provisioning_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -754,15 +707,10 @@ def test_provisioning_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ProvisioningClient, ProvisioningAsyncClient]) +@pytest.mark.parametrize("client_class", [ProvisioningClient]) @mock.patch.object( ProvisioningClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProvisioningClient) ) -@mock.patch.object( - ProvisioningAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ProvisioningAsyncClient), -) def test_provisioning_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -854,17 +802,12 @@ def test_provisioning_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ProvisioningClient, ProvisioningAsyncClient]) +@pytest.mark.parametrize("client_class", [ProvisioningClient]) @mock.patch.object( ProvisioningClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test_provisioning_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -941,12 +884,6 @@ def test_provisioning_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - 
"grpc_asyncio", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -978,18 +915,6 @@ def test_provisioning_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ProvisioningClient, - transports.ProvisioningGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", None), ], ) @@ -1017,94 +942,6 @@ def test_provisioning_client_client_options_credentials_file( ) -def test_provisioning_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ProvisioningClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ProvisioningClient, - transports.ProvisioningGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_provisioning_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1112,94 +949,120 @@ def test_provisioning_client_create_channel_credentials_file( dict, ], ) -def test_create_api_hub_instance(request_type, transport: str = "grpc"): +def test_create_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api_hub_instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "state_message": "state_message_value", + "config": {"cmek_key_name": "cmek_key_name_value"}, + "labels": {}, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_api_hub_instance(request) + # Determine if the message type is proto-plus or protobuf + test_field = provisioning_service.CreateApiHubInstanceRequest.meta.fields[ + "api_hub_instance" + ] - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.CreateApiHubInstanceRequest() - assert args[0] == request + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_create_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest() + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api_hub_instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -def test_create_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = provisioning_service.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance_id="api_hub_instance_id_value", - ) + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api_hub_instance"][field])): + del request_init["api_hub_instance"][field][i][subfield] + else: + del request_init["api_hub_instance"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance_id="api_hub_instance_id_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api_hub_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" -def test_create_api_hub_instance_use_cached_wrapped_rpc(): +def test_create_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1220,15 +1083,15 @@ def test_create_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_api_hub_instance ] = mock_rpc + request = {} client.create_api_hub_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api_hub_instance(request) @@ -1238,279 +1101,233 @@ def test_create_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_create_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_api_hub_instance_rest_required_fields( + request_type=provisioning_service.CreateApiHubInstanceRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_api_hub_instance - in client._client._transport._wrapped_methods - ) + transport_class = transports.ProvisioningRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_api_hub_instance - ] = mock_rpc + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.create_api_hub_instance(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # verify required fields with default values are now present - await client.create_api_hub_instance(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("api_hub_instance_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_api_hub_instance(request) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.CreateApiHubInstanceRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api_hub_instance(request) -@pytest.mark.asyncio -async def test_create_api_hub_instance_async_from_dict(): - await test_create_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.CreateApiHubInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiHubInstanceId",)) + & set( + ( + "parent", + "apiHubInstance", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.CreateApiHubInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( + provisioning_service.CreateApiHubInstanceRequest() ) - await client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) -def test_create_api_hub_instance_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = provisioning_service.CreateApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_api_hub_instance( - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api_hub_instance - mock_val = common_fields.ApiHubInstance(name="name_value") - assert arg == mock_val - arg = args[0].api_hub_instance_id - mock_val = "api_hub_instance_id_value" - assert arg == mock_val - - -def test_create_api_hub_instance_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.CreateApiHubInstanceRequest, +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_api_hub_instance( - provisioning_service.CreateApiHubInstanceRequest(), - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api_hub_instance(request) -@pytest.mark.asyncio -async def test_create_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( +def test_create_api_hub_instance_rest_flattened(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_api_hub_instance( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", api_hub_instance=common_fields.ApiHubInstance(name="name_value"), api_hub_instance_id="api_hub_instance_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api_hub_instance - mock_val = common_fields.ApiHubInstance(name="name_value") - assert arg == mock_val - arg = args[0].api_hub_instance_id - mock_val = "api_hub_instance_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances" + % client.transport._host, + args[1], + ) + + +def test_create_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_api_hub_instance( + client.create_api_hub_instance( provisioning_service.CreateApiHubInstanceRequest(), parent="parent_value", api_hub_instance=common_fields.ApiHubInstance(name="name_value"), @@ -1518,6 +1335,12 @@ async def test_create_api_hub_instance_flattened_error_async(): ) +def test_create_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1525,34 +1348,38 @@ async def test_create_api_hub_instance_flattened_error_async(): dict, ], ) -def test_get_api_hub_instance(request_type, transport: str = "grpc"): +def test_get_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ApiHubInstance( name="name_value", state=common_fields.ApiHubInstance.State.INACTIVE, state_message="state_message_value", description="description_value", ) - response = client.get_api_hub_instance(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.GetApiHubInstanceRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_hub_instance(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ApiHubInstance) @@ -1562,64 +1389,13 @@ def test_get_api_hub_instance(request_type, transport: str = "grpc"): assert response.description == "description_value" -def test_get_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest() - - -def test_get_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning_service.GetApiHubInstanceRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest( - name="name_value", - ) - - -def test_get_api_hub_instance_use_cached_wrapped_rpc(): +def test_get_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1639,6 +1415,7 @@ def test_get_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_api_hub_instance ] = mock_rpc + request = {} client.get_api_hub_instance(request) @@ -1652,275 +1429,237 @@ def test_get_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_get_api_hub_instance_rest_required_fields( + request_type=provisioning_service.GetApiHubInstanceRequest, +): + transport_class = transports.ProvisioningRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - ) - response = await client.get_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_get_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_api_hub_instance - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api_hub_instance - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_api_hub_instance(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api_hub_instance(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.GetApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - ) - response = await client.get_api_hub_instance(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.GetApiHubInstanceRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ApiHubInstance) - assert response.name == "name_value" - assert response.state == common_fields.ApiHubInstance.State.INACTIVE - assert response.state_message == "state_message_value" - assert response.description == "description_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_hub_instance(request) -@pytest.mark.asyncio -async def test_get_api_hub_instance_async_from_dict(): - await test_get_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.GetApiHubInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value = common_fields.ApiHubInstance() - client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.GetApiHubInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.GetApiHubInstanceRequest.pb( + provisioning_service.GetApiHubInstanceRequest() ) - await client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiHubInstance.to_json( + common_fields.ApiHubInstance() + ) -def test_get_api_hub_instance_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = provisioning_service.GetApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiHubInstance() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_api_hub_instance( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_api_hub_instance_flattened_error(): +def test_get_api_hub_instance_rest_bad_request( + transport: str = "rest", request_type=provisioning_service.GetApiHubInstanceRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api_hub_instance( - provisioning_service.GetApiHubInstanceRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_hub_instance(request) -@pytest.mark.asyncio -async def test_get_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( +def test_get_api_hub_instance_rest_flattened(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_api_hub_instance( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apiHubInstances/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( +def test_get_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_api_hub_instance( + client.get_api_hub_instance( provisioning_service.GetApiHubInstanceRequest(), name="name_value", ) +def test_get_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1928,92 +1667,45 @@ async def test_get_api_hub_instance_flattened_error_async(): dict, ], ) -def test_lookup_api_hub_instance(request_type, transport: str = "grpc"): +def test_lookup_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - response = client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.LookupApiHubInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() -def test_lookup_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value ) - client.lookup_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest() - - -def test_lookup_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + json_return_value = json_format.MessageToJson(return_value) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning_service.LookupApiHubInstanceRequest( - parent="parent_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_api_hub_instance(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest( - parent="parent_value", - ) + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) -def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): +def test_lookup_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2034,6 +1726,7 @@ def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.lookup_api_hub_instance ] = mock_rpc + request = {} client.lookup_api_hub_instance(request) @@ -2047,204 +1740,223 @@ def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - response = await client.lookup_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_lookup_api_hub_instance_rest_required_fields( + request_type=provisioning_service.LookupApiHubInstanceRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ProvisioningRestTransport - # Ensure method has been cached - assert ( - client._client._transport.lookup_api_hub_instance - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lookup_api_hub_instance - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.lookup_api_hub_instance(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.lookup_api_hub_instance(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - response = await client.lookup_api_hub_instance(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.LookupApiHubInstanceRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_api_hub_instance(request) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async_from_dict(): - await test_lookup_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_lookup_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_lookup_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.LookupApiHubInstanceRequest() + unset_fields = transport.lookup_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), + ) + client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( + provisioning_service.LookupApiHubInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + provisioning_service.LookupApiHubInstanceResponse.to_json( + provisioning_service.LookupApiHubInstanceResponse() + ) + ) + request = provisioning_service.LookupApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = provisioning_service.LookupApiHubInstanceResponse() -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.lookup_api_hub_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.LookupApiHubInstanceRequest() + pre.assert_called_once() + post.assert_called_once() - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - await client.lookup_api_hub_instance(request) +def test_lookup_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.LookupApiHubInstanceRequest, +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_api_hub_instance(request) -def test_lookup_api_hub_instance_flattened(): +def test_lookup_api_hub_instance_rest_flattened(): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.lookup_api_hub_instance( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup" + % client.transport._host, + args[1], + ) -def test_lookup_api_hub_instance_flattened_error(): +def test_lookup_api_hub_instance_rest_flattened_error(transport: str = "rest"): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2256,3044 +1968,798 @@ def test_lookup_api_hub_instance_flattened_error(): ) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.lookup_api_hub_instance( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val +def test_lookup_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - await client.lookup_api_hub_instance( - provisioning_service.LookupApiHubInstanceRequest(), - parent="parent_value", + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.CreateApiHubInstanceRequest, - dict, - ], -) -def test_create_api_hub_instance_rest(request_type): - client = ProvisioningClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["api_hub_instance"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "state_message": "state_message_value", - "config": {"cmek_key_name": "cmek_key_name_value"}, - "labels": {}, - "description": "description_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = provisioning_service.CreateApiHubInstanceRequest.meta.fields[ - "api_hub_instance" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api_hub_instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api_hub_instance"][field])): - del request_init["api_hub_instance"][field][i][subfield] - else: - del request_init["api_hub_instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_api_hub_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + with pytest.raises(ValueError): client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_api_hub_instance - in client._transport._wrapped_methods + # It is an error to provide an api_key and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, + transport=transport, ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - client._transport._wrapped_methods[ - client._transport.create_api_hub_instance - ] = mock_rpc - - request = {} - client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_api_hub_instance_rest_required_fields( - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # It is an error to provide scopes and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api_hub_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("api_hub_instance_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - client = ProvisioningClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("apiHubInstanceId",)) - & set( - ( - "parent", - "apiHubInstance", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( - provisioning_service.CreateApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - 
request = provisioning_service.CreateApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_api_hub_instance_rest_bad_request( - transport: str = "rest", - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_api_hub_instance(request) - - -def test_create_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apiHubInstances" - % client.transport._host, - args[1], - ) - - -def test_create_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_api_hub_instance( - provisioning_service.CreateApiHubInstanceRequest(), - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) - - -def test_create_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.GetApiHubInstanceRequest, - dict, - ], -) -def test_get_api_hub_instance_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api_hub_instance(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ApiHubInstance) - assert response.name == "name_value" - assert response.state == common_fields.ApiHubInstance.State.INACTIVE - assert response.state_message == "state_message_value" - assert response.description == "description_value" - - -def test_get_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_api_hub_instance in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_api_hub_instance - ] = mock_rpc - - request = {} - client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_hub_instance_rest_required_fields( - request_type=provisioning_service.GetApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = provisioning_service.GetApiHubInstanceRequest.pb( - provisioning_service.GetApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ApiHubInstance.to_json( - common_fields.ApiHubInstance() - ) - - request = provisioning_service.GetApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ApiHubInstance() - - client.get_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_hub_instance_rest_bad_request( - transport: str = "rest", request_type=provisioning_service.GetApiHubInstanceRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api_hub_instance(request) - - -def test_get_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apiHubInstances/*}" - % client.transport._host, - args[1], - ) - - -def test_get_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api_hub_instance( - provisioning_service.GetApiHubInstanceRequest(), - name="name_value", - ) - - -def test_get_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.LookupApiHubInstanceRequest, - dict, - ], -) -def test_lookup_api_hub_instance_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = provisioning_service.LookupApiHubInstanceResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lookup_api_hub_instance(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) - - -def test_lookup_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.lookup_api_hub_instance - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.lookup_api_hub_instance - ] = mock_rpc - - request = {} - client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lookup_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lookup_api_hub_instance_rest_required_fields( - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lookup_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lookup_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.lookup_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( - provisioning_service.LookupApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - provisioning_service.LookupApiHubInstanceResponse.to_json( - provisioning_service.LookupApiHubInstanceResponse() - ) - ) - - request = provisioning_service.LookupApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning_service.LookupApiHubInstanceResponse() - - client.lookup_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_api_hub_instance_rest_bad_request( - transport: str = "rest", - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = 
{"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup_api_hub_instance(request) - - -def test_lookup_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.lookup_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup" - % client.transport._host, - args[1], - ) - - -def test_lookup_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lookup_api_hub_instance( - provisioning_service.LookupApiHubInstanceRequest(), - parent="parent_value", - ) - - -def test_lookup_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ProvisioningClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ProvisioningGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - transports.ProvisioningRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ProvisioningClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ProvisioningGrpcTransport, - ) - - -def test_provisioning_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ProvisioningTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_provisioning_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ProvisioningTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_api_hub_instance", - "get_api_hub_instance", - "lookup_api_hub_instance", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_provisioning_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ProvisioningTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_provisioning_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ProvisioningTransport() - adc.assert_called_once() - - -def test_provisioning_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ProvisioningClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - ], -) -def test_provisioning_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - transports.ProvisioningRestTransport, - ], -) -def test_provisioning_transport_auth_gdch_credentials(transport_class): - host = "https://language.com/" - api_audience_tests = [None, "https://language2.com/"] - api_audience_expect = [host, "https://language2.com/"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ProvisioningGrpcTransport, grpc_helpers), - (transports.ProvisioningGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_provisioning_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_provisioning_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ProvisioningRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_provisioning_rest_lro_client(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_provisioning_host_no_port(transport_name): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_provisioning_host_with_port(transport_name): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_provisioning_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ProvisioningClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ProvisioningClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_api_hub_instance._session - session2 = client2.transport.create_api_hub_instance._session - assert session1 != session2 - session1 = client1.transport.get_api_hub_instance._session - session2 = client2.transport.get_api_hub_instance._session - assert session1 != session2 - session1 = client1.transport.lookup_api_hub_instance._session - session2 = client2.transport.lookup_api_hub_instance._session - assert session1 != session2 - - -def test_provisioning_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ProvisioningGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_provisioning_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ProvisioningGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_provisioning_grpc_lro_client(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_provisioning_grpc_lro_async_client(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_api_hub_instance_path(): - project = "squid" - location = "clam" - api_hub_instance = "whelk" - expected = "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( - project=project, - location=location, - api_hub_instance=api_hub_instance, - ) - actual = ProvisioningClient.api_hub_instance_path( - project, location, api_hub_instance - ) - assert expected == actual - - -def test_parse_api_hub_instance_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api_hub_instance": "nudibranch", - } - path = ProvisioningClient.api_hub_instance_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_api_hub_instance_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ProvisioningClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = ProvisioningClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = ProvisioningClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ProvisioningClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = ProvisioningClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ProvisioningClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = ProvisioningClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = ProvisioningClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = ProvisioningClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ProvisioningClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = ProvisioningClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ProvisioningClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ProvisioningTransport, "_prep_wrapped_messages" - ) as prep: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ProvisioningTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ProvisioningClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request ) + client = ProvisioningClient(transport=transport) + assert client.transport is transport - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProvisioningRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = ProvisioningClient( +def test_transport_kind(transport_name): + transport = ProvisioningClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + assert transport.kind == transport_name - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_operations(request) +def test_provisioning_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_provisioning_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation(transport: str = "grpc"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_api_hub_instance", + "get_api_hub_instance", + "lookup_api_hub_instance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +def test_provisioning_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) -def test_delete_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) +def test_provisioning_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport() + adc.assert_called_once() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None +def test_provisioning_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProvisioningClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_provisioning_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProvisioningRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ProvisioningAsyncClient( +def test_provisioning_rest_lro_client(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + transport = client.transport - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client -def test_delete_operation_from_dict(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_no_port(transport_name): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com/" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_with_port(transport_name): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000/" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProvisioningClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProvisioningClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api_hub_instance._session + session2 = client2.transport.create_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.get_api_hub_instance._session + session2 = client2.transport.get_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.lookup_api_hub_instance._session + session2 = client2.transport.lookup_api_hub_instance._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_api_hub_instance_path(): + project = "squid" + location = "clam" + api_hub_instance = "whelk" + expected = "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( + project=project, + location=location, + api_hub_instance=api_hub_instance, ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + actual = ProvisioningClient.api_hub_instance_path( + project, location, api_hub_instance ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_hub_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api_hub_instance": "nudibranch", + } + path = ProvisioningClient.api_hub_instance_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_api_hub_instance_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ProvisioningClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ProvisioningClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ProvisioningClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ProvisioningClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ProvisioningClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ProvisioningClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = ProvisioningClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ProvisioningClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ProvisioningClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ProvisioningClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProvisioningClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ProvisioningClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ProvisioningAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5311,7 +2777,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ProvisioningClient( @@ -5328,8 +2793,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport), - (ProvisioningAsyncClient, transports.ProvisioningGrpcAsyncIOTransport), + (ProvisioningClient, transports.ProvisioningRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py index f56c0e63d5f7..836aa804eb51 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.runtime_project_attachment_service import ( - RuntimeProjectAttachmentServiceAsyncClient, RuntimeProjectAttachmentServiceClient, pagers, transports, @@ -234,11 +233,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -337,11 +331,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -425,8 +414,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (RuntimeProjectAttachmentServiceClient, "grpc"), - (RuntimeProjectAttachmentServiceAsyncClient, "grpc_asyncio"), (RuntimeProjectAttachmentServiceClient, "rest"), ], ) @@ -453,11 +440,6 @@ def test_runtime_project_attachment_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.RuntimeProjectAttachmentServiceGrpcTransport, "grpc"), - ( - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (transports.RuntimeProjectAttachmentServiceRestTransport, "rest"), ], ) @@ -482,8 +464,6 @@ def test_runtime_project_attachment_service_client_service_account_always_use_jw @pytest.mark.parametrize( "client_class,transport_name", [ - (RuntimeProjectAttachmentServiceClient, "grpc"), - (RuntimeProjectAttachmentServiceAsyncClient, "grpc_asyncio"), (RuntimeProjectAttachmentServiceClient, "rest"), ], ) @@ -517,28 +497,17 @@ def test_runtime_project_attachment_service_client_from_service_account_file( def test_runtime_project_attachment_service_client_get_transport_class(): transport = RuntimeProjectAttachmentServiceClient.get_transport_class() available_transports = 
[ - transports.RuntimeProjectAttachmentServiceGrpcTransport, transports.RuntimeProjectAttachmentServiceRestTransport, ] assert transport in available_transports - transport = RuntimeProjectAttachmentServiceClient.get_transport_class("grpc") - assert transport == transports.RuntimeProjectAttachmentServiceGrpcTransport + transport = RuntimeProjectAttachmentServiceClient.get_transport_class("rest") + assert transport == transports.RuntimeProjectAttachmentServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -551,11 +520,6 @@ def test_runtime_project_attachment_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_client_options( client_class, transport_class, transport_name ): @@ -693,30 +657,6 @@ def test_runtime_project_attachment_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - "true", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - "false", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -736,11 +676,6 @@ def test_runtime_project_attachment_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_runtime_project_attachment_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -844,20 +779,12 @@ def test_runtime_project_attachment_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", - [RuntimeProjectAttachmentServiceClient, RuntimeProjectAttachmentServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) @mock.patch.object( RuntimeProjectAttachmentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_get_mtls_endpoint_and_cert_source( client_class, ): @@ -951,20 
+878,12 @@ def test_runtime_project_attachment_service_client_get_mtls_endpoint_and_cert_so ) -@pytest.mark.parametrize( - "client_class", - [RuntimeProjectAttachmentServiceClient, RuntimeProjectAttachmentServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) @mock.patch.object( RuntimeProjectAttachmentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -1045,16 +964,6 @@ def test_runtime_project_attachment_service_client_client_api_endpoint(client_cl @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -1090,18 +999,6 @@ def test_runtime_project_attachment_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -1134,96 +1031,6 @@ def test_runtime_project_attachment_service_client_client_options_credentials_fi ) -def test_runtime_project_attachment_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = RuntimeProjectAttachmentServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_runtime_project_attachment_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1231,34 +1038,114 @@ def test_runtime_project_attachment_service_client_create_channel_credentials_fi dict, ], ) -def test_create_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_create_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["runtime_project_attachment"] = { + "name": "name_value", + "runtime_project": "runtime_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + # Determine if the message type is proto-plus or protobuf + test_field = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.meta.fields[ + "runtime_project_attachment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "runtime_project_attachment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["runtime_project_attachment"][field]) + ): + del request_init["runtime_project_attachment"][field][i][subfield] + else: + del request_init["runtime_project_attachment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value", runtime_project="runtime_project_value", ) - response = client.create_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_runtime_project_attachment(request) # Establish that the response is the type that we expect. 
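# The pruning loop above guards against version skew between the installed proto
# dependency and the generator: any sub-key of the sample request body that the
# runtime message does not define is deleted before request_type(**request_init)
# is built. A toy, self-contained illustration of that idea; the extra field name
# below is hypothetical and exists only for this sketch.
runtime_known_subfields = {
    ("runtime_project_attachment", "name"),
    ("runtime_project_attachment", "runtime_project"),
    ("runtime_project_attachment", "create_time"),
}
sample_body = {"name": "name_value", "hypothetical_new_field": "x"}
for key in list(sample_body):
    if ("runtime_project_attachment", key) not in runtime_known_subfields:
        # Unknown at runtime -> drop it so the request object can still be built.
        del sample_body[key]
assert sample_body == {"name": "name_value"}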
assert isinstance( @@ -1268,71 +1155,13 @@ def test_create_runtime_project_attachment(request_type, transport: str = "grpc" assert response.runtime_project == "runtime_project_value" -def test_create_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - - -def test_create_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - -def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_create_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1353,6 +1182,7 @@ def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_runtime_project_attachment ] = mock_rpc + request = {} client.create_runtime_project_attachment(request) @@ -1366,305 +1196,278 @@ def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.create_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_runtime_project_attachment - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request_init["runtime_project_attachment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_runtime_project_attachment - ] = mock_rpc + # verify fields with default values are dropped + assert "runtimeProjectAttachmentId" not in jsonified_request - request = {} - await client.create_runtime_project_attachment(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == request_init["runtime_project_attachment_id"] + ) - await client.create_runtime_project_attachment(request) + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "runtimeProjectAttachmentId" + ] = "runtime_project_attachment_id_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("runtime_project_attachment_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == "runtime_project_attachment_id_value" + ) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.create_runtime_project_attachment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async_from_dict(): - await test_create_runtime_project_attachment_async(request_type=dict) + expected_params = [ + ( + "runtimeProjectAttachmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_runtime_project_attachment_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() + unset_fields = ( + transport.create_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("runtimeProjectAttachmentId",)) + & set( + ( + "parent", + "runtimeProjectAttachmentId", + "runtimeProjectAttachment", + ) ) - client.create_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + ) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_create_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_create_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + request = ( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) - await client.create_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_runtime_project_attachment_flattened(): +def test_create_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
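# The interceptor test above only verifies that the pre/post hooks fire. As a
# hedged sketch (hook names come from the mocks above; the pre hook returns the
# request/metadata pair and the post hook returns the decoded response, matching
# how the mocks are wired), a user-supplied interceptor could look like:
from google.auth import credentials as ga_credentials
from google.cloud.apihub_v1.services.runtime_project_attachment_service import (
    RuntimeProjectAttachmentServiceClient,
    transports,
)


class LoggingInterceptor(transports.RuntimeProjectAttachmentServiceRestInterceptor):
    def pre_create_runtime_project_attachment(self, request, metadata):
        # Runs before transcoding; must return the (possibly modified) pair.
        return request, metadata

    def post_create_runtime_project_attachment(self, response):
        # Runs after the response body has been decoded; must return it.
        return response


transport = transports.RuntimeProjectAttachmentServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = RuntimeProjectAttachmentServiceClient(transport=transport)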
- with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_runtime_project_attachment( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].runtime_project_attachment - mock_val = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ) - assert arg == mock_val - arg = args[0].runtime_project_attachment_id - mock_val = "runtime_project_attachment_id_value" - assert arg == mock_val - - -def test_create_runtime_project_attachment_flattened_error(): + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_runtime_project_attachment(request) + + +def test_create_runtime_project_attachment_rest_flattened(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_runtime_project_attachment( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value" ), runtime_project_attachment_id="runtime_project_attachment_id_value", ) + mock_args.update(sample_request) - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_runtime_project_attachment( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) + client.create_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].runtime_project_attachment - mock_val = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ) - assert arg == mock_val - arg = args[0].runtime_project_attachment_id - mock_val = "runtime_project_attachment_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) + + +def test_create_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_runtime_project_attachment( + client.create_runtime_project_attachment( runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), parent="parent_value", runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( @@ -1674,6 +1477,12 @@ async def test_create_runtime_project_attachment_flattened_error_async(): ) +def test_create_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1681,34 +1490,38 @@ async def test_create_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_get_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_get_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value", runtime_project="runtime_project_value", ) - response = client.get_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1718,69 +1531,13 @@ def test_get_runtime_project_attachment(request_type, transport: str = "grpc"): assert response.runtime_project == "runtime_project_value" -def test_get_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - - -def test_get_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
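# The get_* REST test above repeats the response-stubbing recipe used for create:
# patch the transport session's request() and return a requests.Response whose
# body is the JSON form of the expected proto. A condensed helper-style sketch of
# that recipe, assuming a proto-plus message class as in these tests; the helper
# name is illustrative, not part of the library.
from unittest import mock

from google.protobuf import json_format
from requests import Response


def stub_rest_response(client, message_cls, message, status_code=200):
    """Patch `client`'s REST session so the next HTTP call returns `message`."""
    patcher = mock.patch.object(type(client.transport._session), "request")
    req = patcher.start()
    response_value = Response()
    response_value.status_code = status_code
    # proto-plus wrapper -> underlying protobuf -> JSON body, as in the tests above.
    response_value._content = json_format.MessageToJson(
        message_cls.pb(message)
    ).encode("UTF-8")
    req.return_value = response_value
    # Caller is responsible for patcher.stop() after asserting on `req`.
    return patcher, req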
- with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_get_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1801,6 +1558,7 @@ def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_runtime_project_attachment ] = mock_rpc + request = {} client.get_runtime_project_attachment(request) @@ -1814,282 +1572,256 @@ def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.get_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_runtime_project_attachment - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_runtime_project_attachment - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_runtime_project_attachment(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_runtime_project_attachment(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.get_runtime_project_attachment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async_from_dict(): - await test_get_runtime_project_attachment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_runtime_project_attachment_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - client.get_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_runtime_project_attachment._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_get_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_get_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) ) - await client.get_runtime_project_attachment(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_runtime_project_attachment_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( + request = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.get_runtime_project_attachment( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_runtime_project_attachment_flattened_error(): +def test_get_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_runtime_project_attachment( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_get_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
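# The bad-request test above asserts that an HTTP 400 surfaces as
# core_exceptions.BadRequest. A short hedged sketch of handling that from calling
# code; the helper name and resource handling are illustrative only.
from google.api_core import exceptions as core_exceptions


def fetch_attachment_or_none(client, name):
    # Returns None when the server rejects the request (HTTP 400 -> BadRequest).
    try:
        return client.get_runtime_project_attachment(name=name)
    except core_exceptions.BadRequest:
        return None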
- response = await client.get_runtime_project_attachment( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_get_runtime_project_attachment_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_runtime_project_attachment( + client.get_runtime_project_attachment( runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), name="name_value", ) +def test_get_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2097,110 +1829,52 @@ async def test_get_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_list_runtime_project_attachments(request_type, transport: str = "grpc"): +def test_list_runtime_project_attachments_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( next_page_token="next_page_token_value", ) ) - response = client.list_runtime_project_attachments(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_runtime_project_attachments(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListRuntimeProjectAttachmentsPager) assert response.next_page_token == "next_page_token_value" -def test_list_runtime_project_attachments_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_runtime_project_attachments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - - -def test_list_runtime_project_attachments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
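# The list test above returns a ListRuntimeProjectAttachmentsPager; in normal use
# the pager is simply iterated and later pages are fetched transparently via
# next_page_token. A hedged usage sketch; the helper name is illustrative.
def count_attachments(client, parent):
    # Iterating the pager yields RuntimeProjectAttachment resources across pages.
    return sum(1 for _ in client.list_runtime_project_attachments(parent=parent))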
- ) - client.list_runtime_project_attachments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_runtime_project_attachments_use_cached_wrapped_rpc(): +def test_list_runtime_project_attachments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2221,6 +1895,7 @@ def test_list_runtime_project_attachments_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_runtime_project_attachments ] = mock_rpc + request = {} client.list_runtime_project_attachments(request) @@ -2234,289 +1909,277 @@ def test_list_runtime_project_attachments_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_runtime_project_attachments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_runtime_project_attachments_rest_required_fields( + request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_runtime_project_attachments - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_runtime_project_attachments - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_runtime_project_attachments(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_runtime_project_attachments(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRuntimeProjectAttachmentsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_from_dict(): - await test_list_runtime_project_attachments_async(request_type=dict) - - -def test_list_runtime_project_attachments_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + response_value = Response() + response_value.status_code = 200 - request.parent = "parent_value" + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - client.list_runtime_project_attachments(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.list_runtime_project_attachments(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_runtime_project_attachments_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + unset_fields = ( + transport.list_runtime_project_attachments._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) - await client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + & set(("parent",)) + ) -def test_list_runtime_project_attachments_flattened(): - client = RuntimeProjectAttachmentServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_list_runtime_project_attachments", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_list_runtime_project_attachments", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + + request = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.list_runtime_project_attachments( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_runtime_project_attachments_flattened_error(): +def test_list_runtime_project_attachments_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_runtime_project_attachments( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_runtime_project_attachments(request) -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_list_runtime_project_attachments_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_runtime_project_attachments( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_runtime_project_attachments(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_list_runtime_project_attachments_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_runtime_project_attachments( + client.list_runtime_project_attachments( runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), parent="parent_value", ) -def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): +def test_list_runtime_project_attachments_rest_pager(transport: str = "rest"): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( runtime_project_attachments=[ runtime_project_attachment_service.RuntimeProjectAttachment(), @@ -2541,22 +2204,26 @@ def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): runtime_project_attachment_service.RuntimeProjectAttachment(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_runtime_project_attachments( - request={}, retry=retry, timeout=timeout + # Wrap the values into proper Response objs + response = tuple( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + x + ) + for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_runtime_project_attachments(request=sample_request) results = list(pager) assert len(results) == 6 @@ -2565,152 +2232,9 @@ def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): for i in results ) - -def test_list_runtime_project_attachments_pages(transport_name: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, + pages = list( + client.list_runtime_project_attachments(request=sample_request).pages ) - pages = list(client.list_runtime_project_attachments(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_pager(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_runtime_project_attachments( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, runtime_project_attachment_service.RuntimeProjectAttachment) - for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_pages(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_runtime_project_attachments(request={}) - ).pages: - pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2722,99 +2246,43 @@ async def test_list_runtime_project_attachments_async_pages(): dict, ], ) -def test_delete_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_delete_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_runtime_project_attachment(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - - -def test_delete_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_delete_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2835,6 +2303,7 @@ def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_runtime_project_attachment ] = mock_rpc + request = {} client.delete_runtime_project_attachment(request) @@ -2848,258 +2317,232 @@ def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_runtime_project_attachment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_runtime_project_attachment - ] = mock_rpc - - request = {} - await client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async( - transport: str = "grpc_asyncio", +def test_delete_runtime_project_attachment_rest_required_fields( request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, ): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_runtime_project_attachment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async_from_dict(): - await test_delete_runtime_project_attachment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_runtime_project_attachment_field_headers(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value = None - client.delete_runtime_project_attachment(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_runtime_project_attachment(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = ( + transport.delete_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_runtime_project_attachment_flattened(): - client = RuntimeProjectAttachmentServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_runtime_project_attachment( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_runtime_project_attachment_flattened_error(): + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_delete_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + pb_message = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = ( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_runtime_project_attachment( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_runtime_project_attachment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_runtime_project_attachment( + client.delete_runtime_project_attachment( runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), name="name_value", ) +def test_delete_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3107,33 +2550,35 @@ async def test_delete_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_lookup_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_lookup_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - response = client.lookup_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -3142,69 +2587,13 @@ def test_lookup_runtime_project_attachment(request_type, transport: str = "grpc" ) -def test_lookup_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - - -def test_lookup_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.lookup_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_lookup_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3225,6 +2614,7 @@ def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.lookup_runtime_project_attachment ] = mock_rpc + request = {} client.lookup_runtime_project_attachment(request) @@ -3238,216 +2628,235 @@ def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - response = await client.lookup_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.lookup_runtime_project_attachment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lookup_runtime_project_attachment - ] = mock_rpc - - request = {} - await client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.lookup_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async( - transport: str = "grpc_asyncio", +def test_lookup_runtime_project_attachment_rest_required_fields( request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, ): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - response = await client.lookup_runtime_project_attachment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance( - response, - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, - ) + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async_from_dict(): - await test_lookup_runtime_project_attachment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_lookup_runtime_project_attachment_field_headers(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + response = client.lookup_runtime_project_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lookup_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + unset_fields = ( + transport.lookup_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_lookup_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_lookup_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.to_json( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - await client.lookup_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + client.lookup_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_lookup_runtime_project_attachment_flattened(): +def test_lookup_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_runtime_project_attachment(request) + + +def test_lookup_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.lookup_runtime_project_attachment( + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment" + % client.transport._host, + args[1], + ) -def test_lookup_runtime_project_attachment_flattened_error(): +def test_lookup_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3459,3853 +2868,802 @@ def test_lookup_runtime_project_attachment_flattened_error(): ) -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_lookup_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.lookup_runtime_project_attachment( - name="name_value", + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # It is an error to provide an api_key and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, + transport=transport, + ) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( + # It is an error to provide scopes and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - await client.lookup_runtime_project_attachment( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest(), - name="name_value", + client = RuntimeProjectAttachmentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + assert client.transport is transport + + @pytest.mark.parametrize( - "request_type", + "transport_class", [ - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, - dict, + transports.RuntimeProjectAttachmentServiceRestTransport, ], ) -def test_create_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["runtime_project_attachment"] = { - "name": "name_value", - "runtime_project": "runtime_project_value", - "create_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.meta.fields[ - "runtime_project_attachment" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "runtime_project_attachment" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, 
len(request_init["runtime_project_attachment"][field]) - ): - del request_init["runtime_project_attachment"][field][i][subfield] - else: - del request_init["runtime_project_attachment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" - - -def test_create_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_runtime_project_attachment - ] = mock_rpc - - request = {} - client.create_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["runtime_project_attachment_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "runtimeProjectAttachmentId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "runtimeProjectAttachmentId" in jsonified_request - assert ( - jsonified_request["runtimeProjectAttachmentId"] - == request_init["runtime_project_attachment_id"] - ) - - jsonified_request["parent"] = "parent_value" - jsonified_request[ - "runtimeProjectAttachmentId" - ] = "runtime_project_attachment_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("runtime_project_attachment_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "runtimeProjectAttachmentId" in jsonified_request - assert ( - jsonified_request["runtimeProjectAttachmentId"] - == "runtime_project_attachment_id_value" - ) - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_runtime_project_attachment(request) - - expected_params = [ - ( - "runtimeProjectAttachmentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.create_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("runtimeProjectAttachmentId",)) - & set( - ( - "parent", - "runtimeProjectAttachmentId", - "runtimeProjectAttachment", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_create_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_create_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - runtime_project_attachment_service.RuntimeProjectAttachment.to_json( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - ) - - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - - client.create_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - client = 
RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_runtime_project_attachment(request) - - -def test_create_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" - % client.transport._host, - args[1], - ) - - -def test_create_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_runtime_project_attachment( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - -def test_create_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_get_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" - - -def test_get_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_runtime_project_attachment - ] = mock_rpc - - request = {} - client.get_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_runtime_project_attachment._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_get_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_get_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - runtime_project_attachment_service.RuntimeProjectAttachment.to_json( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - ) - - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - - client.get_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": 
"projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_runtime_project_attachment(request) - - -def test_get_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" - % client.transport._host, - args[1], - ) - - -def test_get_runtime_project_attachment_rest_flattened_error(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_runtime_project_attachment( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_get_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, - dict, - ], -) -def test_list_runtime_project_attachments_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_runtime_project_attachments(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRuntimeProjectAttachmentsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_runtime_project_attachments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_runtime_project_attachments - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_runtime_project_attachments - ] = mock_rpc - - request = {} - client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_runtime_project_attachments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_runtime_project_attachments_rest_required_fields( - request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_runtime_project_attachments(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_runtime_project_attachments_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.list_runtime_project_attachments._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_list_runtime_project_attachments", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_list_runtime_project_attachments", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": 
pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - client.list_runtime_project_attachments( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_runtime_project_attachments_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_runtime_project_attachments(request) - - -def test_list_runtime_project_attachments_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_runtime_project_attachments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" - % client.transport._host, - args[1], - ) - - -def test_list_runtime_project_attachments_rest_flattened_error(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_runtime_project_attachments( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), - parent="parent_value", - ) - - -def test_list_runtime_project_attachments_rest_pager(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_runtime_project_attachments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, runtime_project_attachment_service.RuntimeProjectAttachment) - for i in results - ) - - pages = list( - client.list_runtime_project_attachments(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_delete_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request 
that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_runtime_project_attachment - ] = mock_rpc - - request = {} - client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.delete_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_delete_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - pb_message = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_runtime_project_attachment(request) - - -def test_delete_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_runtime_project_attachment( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_delete_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_lookup_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lookup_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, - ) - - -def test_lookup_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.lookup_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.lookup_runtime_project_attachment - ] = mock_rpc - - request = {} - client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lookup_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lookup_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lookup_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lookup_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.lookup_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_lookup_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_lookup_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.to_json( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - 
runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - client.lookup_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup_runtime_project_attachment(request) - - -def test_lookup_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.lookup_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment" - % client.transport._host, - args[1], - ) - - -def test_lookup_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lookup_runtime_project_attachment( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_lookup_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - transports.RuntimeProjectAttachmentServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = RuntimeProjectAttachmentServiceClient.get_transport_class( - transport_name - )( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - ) - - -def test_runtime_project_attachment_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_runtime_project_attachment_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_runtime_project_attachment", - "get_runtime_project_attachment", - "list_runtime_project_attachments", - "delete_runtime_project_attachment", - "lookup_runtime_project_attachment", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_runtime_project_attachment_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_runtime_project_attachment_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuntimeProjectAttachmentServiceTransport() - adc.assert_called_once() - - -def test_runtime_project_attachment_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RuntimeProjectAttachmentServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - transports.RuntimeProjectAttachmentServiceRestTransport, - ], -) -def test_runtime_project_attachment_service_transport_auth_gdch_credentials( - transport_class, -): - host = "https://language.com/" - api_audience_tests = [None, "https://language2.com/"] - api_audience_expect = [host, "https://language2.com/"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.RuntimeProjectAttachmentServiceGrpcTransport, grpc_helpers), - ( - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - grpc_helpers_async, - ), - ], -) -def test_runtime_project_attachment_service_transport_create_channel( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_runtime_project_attachment_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_runtime_project_attachment_service_host_no_port(transport_name): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_runtime_project_attachment_service_host_with_port(transport_name): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000/" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_runtime_project_attachment_service_client_transport_session_collision( - transport_name, -): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = RuntimeProjectAttachmentServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = RuntimeProjectAttachmentServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_runtime_project_attachment._session - session2 = client2.transport.create_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.get_runtime_project_attachment._session - session2 = client2.transport.get_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.list_runtime_project_attachments._session - session2 = client2.transport.list_runtime_project_attachments._session - assert session1 != session2 - session1 = client1.transport.delete_runtime_project_attachment._session - session2 = client2.transport.delete_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.lookup_runtime_project_attachment._session - session2 = client2.transport.lookup_runtime_project_attachment._session - assert session1 != session2 - - -def test_runtime_project_attachment_service_grpc_transport_channel(): - channel = 
grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_runtime_project_attachment_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_channel_mtls_with_adc( - transport_class, -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_runtime_project_attachment_path(): - project = "squid" - location = "clam" - runtime_project_attachment = "whelk" - expected = "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( - project=project, - location=location, - runtime_project_attachment=runtime_project_attachment, - ) - actual = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( - project, location, runtime_project_attachment - ) - assert expected == actual - - -def test_parse_runtime_project_attachment_path(): - expected = { - "project": "octopus", - "location": "oyster", - "runtime_project_attachment": "nudibranch", - } - path = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( - **expected - ) - - # Check that the path construction is reversible. - actual = ( - RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path( - path - ) - ) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = RuntimeProjectAttachmentServiceClient.common_billing_account_path( - billing_account - ) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = RuntimeProjectAttachmentServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path( - path - ) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = RuntimeProjectAttachmentServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = RuntimeProjectAttachmentServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = RuntimeProjectAttachmentServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = RuntimeProjectAttachmentServiceClient.common_organization_path( - organization - ) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = RuntimeProjectAttachmentServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = RuntimeProjectAttachmentServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = RuntimeProjectAttachmentServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = RuntimeProjectAttachmentServiceClient.common_location_path( - project, location - ) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = RuntimeProjectAttachmentServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_transport_kind(transport_name): + transport = RuntimeProjectAttachmentServiceClient.get_transport_class( + transport_name + )( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_runtime_project_attachment_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_runtime_project_attachment_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_runtime_project_attachment", + "get_runtime_project_attachment", + "list_runtime_project_attachments", + "delete_runtime_project_attachment", + "lookup_runtime_project_attachment", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_runtime_project_attachment_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_runtime_project_attachment_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation( - request={ - "name": "locations", - } +def test_runtime_project_attachment_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RuntimeProjectAttachmentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_runtime_project_attachment_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_host_no_port(transport_name): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com/" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - -def test_cancel_operation_field_headers(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_host_with_port(transport_name): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000/" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RuntimeProjectAttachmentServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RuntimeProjectAttachmentServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_runtime_project_attachment._session + session2 = client2.transport.create_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.get_runtime_project_attachment._session + session2 = client2.transport.get_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.list_runtime_project_attachments._session + session2 = client2.transport.list_runtime_project_attachments._session + assert session1 != session2 + session1 = client1.transport.delete_runtime_project_attachment._session + session2 = client2.transport.delete_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.lookup_runtime_project_attachment._session + session2 = client2.transport.lookup_runtime_project_attachment._session + assert session1 != session2 -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_runtime_project_attachment_path(): + project = "squid" + location = "clam" + runtime_project_attachment = "whelk" + expected = "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( + project=project, + location=location, + runtime_project_attachment=runtime_project_attachment, + ) + actual = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + project, location, runtime_project_attachment ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_runtime_project_attachment_path(): + expected = { + "project": "octopus", + "location": "oyster", + "runtime_project_attachment": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + **expected + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ( + RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path( + path + ) + ) + assert expected == actual -def test_cancel_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RuntimeProjectAttachmentServiceClient.common_billing_account_path( + billing_account ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + assert expected == actual - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = RuntimeProjectAttachmentServiceClient.common_billing_account_path(**expected) -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path( + path ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = RuntimeProjectAttachmentServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = RuntimeProjectAttachmentServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RuntimeProjectAttachmentServiceClient.common_organization_path( + organization ) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = RuntimeProjectAttachmentServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = RuntimeProjectAttachmentServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = RuntimeProjectAttachmentServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RuntimeProjectAttachmentServiceClient.common_location_path( + project, location ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.common_location_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() - + prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_list_locations_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_operation(request) -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Establish that the response is the type that we expect. + assert response is None -def test_get_location(transport: str = "grpc"): +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert isinstance(response, operations_pb2.Operation) -def test_get_location_field_headers(): +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -7323,7 +3681,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = RuntimeProjectAttachmentServiceClient( @@ -7342,11 +3699,7 @@ def test_client_ctx(): [ ( RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, + transports.RuntimeProjectAttachmentServiceRestTransport, ), ], ) From d326f79034540e6028ccd6ac5f329f3269fa12b9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:29:01 -0400 Subject: [PATCH 051/108] chore: release main (#13056) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
google-cloud-apihub: 0.2.0 ## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.1.0...google-cloud-apihub-v0.2.0) (2024-09-04) ### ⚠ BREAKING CHANGES * [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ### Bug Fixes * [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ([3762ff4](https://github.com/googleapis/google-cloud-python/commit/3762ff40e51466bc516939a31732300c8e20211a))
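A minimal sketch of what this breaking change means for callers — the example below is not part of the patch; it assumes the standard GAPIC re-export of `RuntimeProjectAttachmentServiceClient` from `google.cloud.apihub_v1` and uses only the constructor arguments exercised in the regenerated tests above (`credentials`, `transport`, `client_options`). With gRPC removed, `"rest"` is the only transport the library ships, so passing `transport="grpc"` or `"grpc_asyncio"` is no longer accepted:

```python
# Hypothetical usage sketch (not taken from the patch): construct the API hub
# client over the REST transport, the only transport remaining as of 0.2.0.
from google.api_core import client_options
from google.auth.credentials import AnonymousCredentials
from google.cloud import apihub_v1  # assumes the client is re-exported here

client = apihub_v1.RuntimeProjectAttachmentServiceClient(
    credentials=AnonymousCredentials(),  # anonymous creds, as in the unit tests
    transport="rest",                    # "grpc" / "grpc_asyncio" were removed
    client_options=client_options.ClientOptions(
        api_endpoint="apihub.googleapis.com"  # default endpoint shown in the tests
    ),
)
```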
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- packages/google-cloud-apihub/CHANGELOG.md | 11 +++++++++++ .../google/cloud/apihub/gapic_version.py | 2 +- .../google/cloud/apihub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.apihub.v1.json | 2 +- 5 files changed, 15 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a695d96468bb..4fe1785f9ff1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -17,7 +17,7 @@ "packages/google-cloud-api-keys": "0.5.11", "packages/google-cloud-apigee-connect": "1.9.5", "packages/google-cloud-apigee-registry": "0.6.11", - "packages/google-cloud-apihub": "0.1.0", + "packages/google-cloud-apihub": "0.2.0", "packages/google-cloud-appengine-admin": "1.11.5", "packages/google-cloud-appengine-logging": "1.4.5", "packages/google-cloud-apphub": "0.1.2", diff --git a/packages/google-cloud-apihub/CHANGELOG.md b/packages/google-cloud-apihub/CHANGELOG.md index ae96d0e193c5..6b8fb34aaf0b 100644 --- a/packages/google-cloud-apihub/CHANGELOG.md +++ b/packages/google-cloud-apihub/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.1.0...google-cloud-apihub-v0.2.0) (2024-09-04) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) + +### Bug Fixes + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ([3762ff4](https://github.com/googleapis/google-cloud-python/commit/3762ff40e51466bc516939a31732300c8e20211a)) + ## 0.1.0 (2024-08-08) diff --git a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json index 475a2011ac20..2dd2f7e3c87d 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apihub", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { From 2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487 Mon Sep 17 00:00:00 2001 From: "owlbot-bootstrapper[bot]" <104649659+owlbot-bootstrapper[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 21:32:03 +0000 Subject: [PATCH 052/108] feat: add initial files for google.marketingplatform.admin.v1alpha (#13060) Source-Link: https://github.com/googleapis/googleapis-gen/commit/01202948aeacf502f63d3d01995521589e4c6db4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcmtldGluZ3BsYXRmb3JtLWFkbWluLy5Pd2xCb3QueWFtbCIsImgiOiIwMTIwMjk0OGFlYWNmNTAyZjYzZDNkMDE5OTU1MjE1ODllNGM2ZGI0In0= PiperOrigin-RevId: 0 --------- Co-authored-by: Owlbot Bootstrapper Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.OwlBot.yaml | 18 + .../.coveragerc | 13 + .../.flake8 | 33 + .../.gitignore | 63 + .../.repo-metadata.json | 17 + .../CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../CONTRIBUTING.rst | 271 + .../LICENSE | 202 + .../MANIFEST.in | 25 + .../README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../docs/conf.py | 384 ++ .../docs/index.rst | 23 + .../marketingplatform_admin_service.rst | 10 + .../services_.rst | 6 + .../types_.rst | 6 + .../docs/multiprocessing.rst | 7 + .../ads/marketingplatform_admin/__init__.py | 57 + .../marketingplatform_admin/gapic_version.py | 16 + .../ads/marketingplatform_admin/py.typed | 2 + .../__init__.py | 51 + .../gapic_metadata.json | 103 + .../gapic_version.py | 16 + .../marketingplatform_admin_v1alpha/py.typed | 2 + .../services/__init__.py | 15 + .../__init__.py | 22 + .../async_client.py | 894 +++ .../marketingplatform_admin_service/client.py | 1349 ++++ .../marketingplatform_admin_service/pagers.py | 208 + .../transports/__init__.py | 41 + .../transports/base.py | 232 + .../transports/grpc.py | 412 ++ .../transports/grpc_asyncio.py | 444 ++ .../transports/rest.py | 858 +++ .../types/__init__.py | 40 + .../types/marketingplatform_admin.py | 217 + .../types/resources.py | 120 + .../mypy.ini | 3 + .../noxfile.py | 452 ++ ...ice_create_analytics_account_link_async.py | 56 + ...vice_create_analytics_account_link_sync.py | 56 + ...ice_delete_analytics_account_link_async.py | 50 + ...vice_delete_analytics_account_link_sync.py | 50 + ...rm_admin_service_get_organization_async.py | 52 + ...orm_admin_service_get_organization_sync.py | 52 + ...vice_list_analytics_account_links_async.py | 53 + ...rvice_list_analytics_account_links_sync.py | 53 + ...ervice_set_property_service_level_async.py | 54 + ...service_set_property_service_level_sync.py | 54 + ...oogle.marketingplatform.admin.v1alpha.json | 822 +++ .../scripts/decrypt-secrets.sh | 46 + ...arketingplatform_admin_v1alpha_keywords.py | 180 + .../setup.py | 95 + .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 6 + 
.../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../tests/__init__.py | 15 + .../tests/unit/__init__.py | 15 + .../tests/unit/gapic/__init__.py | 15 + .../__init__.py | 15 + .../test_marketingplatform_admin_service.py | 5969 +++++++++++++++++ 69 files changed, 14653 insertions(+) create mode 100644 packages/google-ads-marketingplatform-admin/.OwlBot.yaml create mode 100644 packages/google-ads-marketingplatform-admin/.coveragerc create mode 100644 packages/google-ads-marketingplatform-admin/.flake8 create mode 100644 packages/google-ads-marketingplatform-admin/.gitignore create mode 100644 packages/google-ads-marketingplatform-admin/.repo-metadata.json create mode 100644 packages/google-ads-marketingplatform-admin/CHANGELOG.md create mode 100644 packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md create mode 100644 packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst create mode 100644 packages/google-ads-marketingplatform-admin/LICENSE create mode 100644 packages/google-ads-marketingplatform-admin/MANIFEST.in create mode 100644 packages/google-ads-marketingplatform-admin/README.rst create mode 120000 packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md create mode 120000 packages/google-ads-marketingplatform-admin/docs/README.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/_static/custom.css create mode 100644 packages/google-ads-marketingplatform-admin/docs/_templates/layout.html create mode 100644 packages/google-ads-marketingplatform-admin/docs/conf.py create mode 100644 packages/google-ads-marketingplatform-admin/docs/index.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst create mode 100644 packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py create mode 100644 
packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py create mode 100644 packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py create mode 100644 packages/google-ads-marketingplatform-admin/mypy.ini create mode 100644 packages/google-ads-marketingplatform-admin/noxfile.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py create mode 100644 
packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py create mode 100644 packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json create mode 100755 packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh create mode 100644 packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py create mode 100644 packages/google-ads-marketingplatform-admin/setup.py create mode 100644 packages/google-ads-marketingplatform-admin/testing/.gitignore create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt create mode 100644 packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt create mode 100644 packages/google-ads-marketingplatform-admin/tests/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py create mode 100644 packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py diff --git a/packages/google-ads-marketingplatform-admin/.OwlBot.yaml b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml new file mode 100644 index 000000000000..d397bf3a63bd --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/marketingplatform/admin/(v.*)/.*-py + dest: /owl-bot-staging/google-ads-marketingplatform-admin/$1 +api-name: google-ads-marketingplatform-admin diff --git a/packages/google-ads-marketingplatform-admin/.coveragerc b/packages/google-ads-marketingplatform-admin/.coveragerc new file mode 100644 index 000000000000..f2b0df425e8e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/ads/marketingplatform_admin/__init__.py + google/ads/marketingplatform_admin/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-ads-marketingplatform-admin/.flake8 b/packages/google-ads-marketingplatform-admin/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-ads-marketingplatform-admin/.gitignore b/packages/google-ads-marketingplatform-admin/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-ads-marketingplatform-admin/.repo-metadata.json b/packages/google-ads-marketingplatform-admin/.repo-metadata.json new file mode 100644 index 000000000000..094cd0a04207 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-ads-marketingplatform-admin", + "name_pretty": "Google Marketing Platform Admin API", + "api_description": "The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. 
You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties.", + "product_documentation": "https://developers.google.com/analytics/devguides/config/gmp/v1", + "client_documentation": "https://googleapis.dev/python/google-ads-marketingplatform-admin/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-ads-marketingplatform-admin", + "api_id": "marketingplatformadmin.googleapis.com", + "default_version": "v1alpha", + "codeowner_team": "", + "api_shortname": "marketingplatformadmin" +} diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst new file mode 100644 index 000000000000..a7223a5e0b19 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
+ +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. 
If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-ads-marketingplatform-admin + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. 
_Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-ads-marketingplatform-admin/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-ads-marketingplatform-admin/LICENSE b/packages/google-ads-marketingplatform-admin/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-ads-marketingplatform-admin/MANIFEST.in b/packages/google-ads-marketingplatform-admin/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-ads-marketingplatform-admin/README.rst b/packages/google-ads-marketingplatform-admin/README.rst new file mode 100644 index 000000000000..0c99d23ee7c1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/README.rst @@ -0,0 +1,108 @@ +Python Client for Google Marketing Platform Admin API +===================================================== + +|preview| |pypi| |versions| + +`Google Marketing Platform Admin API`_: The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. _Google Marketing Platform Admin API: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Client Library Documentation: https://googleapis.dev/python/google-ads-marketingplatform-admin/latest +.. _Product Documentation: https://developers.google.com/analytics/devguides/config/gmp/v1 + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Marketing Platform Admin API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Marketing Platform Admin API.: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. 
_samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-ads-marketingplatform-admin + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-ads-marketingplatform-admin + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Marketing Platform Admin API + to see other available methods on the client. +- Read the `Google Marketing Platform Admin API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Marketing Platform Admin API Product documentation: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/README.rst b/packages/google-ads-marketingplatform-admin/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/_static/custom.css b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-ads-marketingplatform-admin/docs/conf.py b/packages/google-ads-marketingplatform-admin/docs/conf.py new file mode 100644 index 000000000000..5c68a3e1a72f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-ads-marketingplatform-admin documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-ads-marketingplatform-admin" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-ads-marketingplatform-admin", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-ads-marketingplatform-admin-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin.tex", + "google-ads-marketingplatform-admin Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + author, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-ads-marketingplatform-admin/docs/index.rst b/packages/google-ads-marketingplatform-admin/docs/index.rst new file mode 100644 index 000000000000..b217f5c4b15d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + marketingplatform_admin_v1alpha/services_ + marketingplatform_admin_v1alpha/types_ + + +Changelog +--------- + +For a list of all ``google-ads-marketingplatform-admin`` releases: + +.. 
toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst new file mode 100644 index 000000000000..938e180ef989 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst @@ -0,0 +1,10 @@ +MarketingplatformAdminService +----------------------------------------------- + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service + :members: + :inherited-members: + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst new file mode 100644 index 000000000000..427be3b19a50 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst @@ -0,0 +1,6 @@ +Services for Google Ads Marketingplatform Admin v1alpha API +=========================================================== +.. toctree:: + :maxdepth: 2 + + marketingplatform_admin_service diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst new file mode 100644 index 000000000000..829ca4ea07f4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst @@ -0,0 +1,6 @@ +Types for Google Ads Marketingplatform Admin v1alpha API +======================================================== + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py new file mode 100644 index 000000000000..56669ac018e6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.ads.marketingplatform_admin import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.async_client import ( + MarketingplatformAdminServiceAsyncClient, +) +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.client import ( + MarketingplatformAdminServiceClient, +) +from google.ads.marketingplatform_admin_v1alpha.types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from google.ads.marketingplatform_admin_v1alpha.types.resources import ( + AnalyticsAccountLink, + LinkVerificationState, + Organization, +) + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. 
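[Editor's illustrative aside, not part of the generated patch.] The files above export the generated GAPIC surface for the Google Marketing Platform Admin API: `MarketingplatformAdminServiceClient`, its async counterpart, and the request types for the five RPCs listed in the diff. As a rough sketch of how that surface fits together, the Python snippet below instantiates the synchronous client and calls two of those RPCs. It assumes Application Default Credentials are configured; the organization ID and the `organizations/{id}` resource-name pattern, as well as the `name`/`parent` request fields, are illustrative assumptions rather than something confirmed by this patch.

```python
# Minimal usage sketch for the newly added google-ads-marketingplatform-admin
# package (not part of the generated code). Assumes Application Default
# Credentials are available; "MY_ORG_ID" is a placeholder, not a real resource.
from google.ads import marketingplatform_admin_v1alpha


def show_organization_links(organization_id: str = "MY_ORG_ID") -> None:
    # Synchronous client. An async client (MarketingplatformAdminServiceAsyncClient)
    # and gRPC/REST transports are also generated, per the diff above.
    client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient()

    # GetOrganization: fetch the Google Marketing Platform organization.
    org = client.get_organization(
        request=marketingplatform_admin_v1alpha.GetOrganizationRequest(
            name=f"organizations/{organization_id}",  # assumed resource-name format
        )
    )
    print(org)

    # ListAnalyticsAccountLinks: iterate linked Google Analytics accounts
    # through the generated pager.
    links = client.list_analytics_account_links(
        request=marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest(
            parent=f"organizations/{organization_id}",  # assumed parent format
        )
    )
    for link in links:
        print(link)


if __name__ == "__main__":
    show_organization_links()
```

If the client is used with multiprocessing, the guidance in the `docs/multiprocessing.rst` file added earlier in this patch applies: create client instances after `os.fork` (for example, inside a `multiprocessing.Pool` worker) rather than sharing one instance across processes.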
diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..c04bf34623a8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, +) +from .types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .types.resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "MarketingplatformAdminServiceAsyncClient", + "AnalyticsAccountLink", + "AnalyticsServiceLevel", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "LinkVerificationState", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "MarketingplatformAdminServiceClient", + "Organization", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..8d346e91ed67 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ads.marketingplatform_admin_v1alpha", + "protoPackage": "google.marketingplatform.admin.v1alpha", + "schema": "1.0", + "services": { + "MarketingplatformAdminService": { + "clients": { + "grpc": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MarketingplatformAdminServiceAsyncClient", + "rpcs": { + 
"CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "rest": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + } + } + } + } +} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py new file mode 100644 index 000000000000..e634b30fd6a2 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MarketingplatformAdminServiceAsyncClient +from .client import MarketingplatformAdminServiceClient + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py new file mode 100644 index 000000000000..cc9647487d98 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py @@ -0,0 +1,894 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .client import MarketingplatformAdminServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport + + +class MarketingplatformAdminServiceAsyncClient: + """Service Interface for the Google Marketing Platform Admin + API. + """ + + _client: MarketingplatformAdminServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(MarketingplatformAdminServiceClient.account_path) + parse_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_account_path + ) + analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.analytics_account_link_path + ) + parse_analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.parse_analytics_account_link_path + ) + organization_path = staticmethod( + MarketingplatformAdminServiceClient.organization_path + ) + parse_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_organization_path + ) + property_path = staticmethod(MarketingplatformAdminServiceClient.property_path) + parse_property_path = staticmethod( + MarketingplatformAdminServiceClient.parse_property_path + ) + common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + MarketingplatformAdminServiceClient.common_project_path + ) + 
parse_common_project_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + MarketingplatformAdminServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_info.__func__(MarketingplatformAdminServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_file.__func__(MarketingplatformAdminServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MarketingplatformAdminServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MarketingplatformAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MarketingplatformAdminServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 
+ + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = MarketingplatformAdminServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the marketingplatform admin service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MarketingplatformAdminServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = MarketingplatformAdminServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_organization( + self, + request: Optional[ + Union[marketingplatform_admin.GetOrganizationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Lookup for a single organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]]): + The request object. Request message for GetOrganization + RPC. + name (:class:`str`): + Required. The name of the Organization to retrieve. + Format: organizations/{org_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.GetOrganizationRequest): + request = marketingplatform_admin.GetOrganizationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_organization + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_analytics_account_links( + self, + request: Optional[ + Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnalyticsAccountLinksAsyncPager: + r"""Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]]): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + parent (:class:`str`): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager: + Response message for + ListAnalyticsAccountLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.ListAnalyticsAccountLinksRequest + ): + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_analytics_account_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAnalyticsAccountLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + analytics_account_link: Optional[resources.AnalyticsAccountLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = await client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (:class:`str`): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (:class:`google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink`): + Required. 
The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + name (:class:`str`): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (:class:`str`): + Required. The parent AnalyticsAccountLink scope where + this property is in. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "MarketingplatformAdminServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MarketingplatformAdminServiceAsyncClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py new file mode 100644 index 000000000000..f31761153ab6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py @@ -0,0 +1,1349 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc import MarketingplatformAdminServiceGrpcTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .transports.rest import MarketingplatformAdminServiceRestTransport + + +class MarketingplatformAdminServiceClientMeta(type): + """Metaclass for the MarketingplatformAdminService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] + _transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = MarketingplatformAdminServiceGrpcAsyncIOTransport + _transport_registry["rest"] = MarketingplatformAdminServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MarketingplatformAdminServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MarketingplatformAdminServiceClient( + metaclass=MarketingplatformAdminServiceClientMeta +): + """Service Interface for the Google Marketing Platform Admin + API. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "marketingplatformadmin.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "marketingplatformadmin.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MarketingplatformAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MarketingplatformAdminServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def analytics_account_link_path( + organization: str, + analytics_account_link: str, + ) -> str: + """Returns a fully-qualified analytics_account_link string.""" + return "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, + ) + + @staticmethod + def parse_analytics_account_link_path(path: str) -> Dict[str, str]: + """Parses a analytics_account_link path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/analyticsAccountLinks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_organization_path(path: str) -> Dict[str, str]: + """Parses a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def property_path( + property: str, + ) -> str: + """Returns a fully-qualified property string.""" + return "properties/{property}".format( + property=property, + ) + + @staticmethod + def parse_property_path(path: str) -> Dict[str, str]: + """Parses a property path into its component segments.""" + m = re.match(r"^properties/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization 
path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or MarketingplatformAdminServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the marketingplatform admin service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MarketingplatformAdminServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MarketingplatformAdminServiceClient._read_environment_variables() + self._client_cert_source = ( + MarketingplatformAdminServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + MarketingplatformAdminServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, MarketingplatformAdminServiceTransport + ) + if transport_provided: + # transport is a MarketingplatformAdminServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(MarketingplatformAdminServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or MarketingplatformAdminServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[MarketingplatformAdminServiceTransport], + Callable[..., MarketingplatformAdminServiceTransport], + ] = ( + MarketingplatformAdminServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., MarketingplatformAdminServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_organization( + self, + request: Optional[ + Union[marketingplatform_admin.GetOrganizationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Lookup for a single organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]): + The request object. Request message for GetOrganization + RPC. + name (str): + Required. The name of the Organization to retrieve. + Format: organizations/{org_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.GetOrganizationRequest): + request = marketingplatform_admin.GetOrganizationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_organization] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_analytics_account_links( + self, + request: Optional[ + Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnalyticsAccountLinksPager: + r"""Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + parent (str): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager: + Response message for + ListAnalyticsAccountLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.ListAnalyticsAccountLinksRequest + ): + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_analytics_account_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAnalyticsAccountLinksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + analytics_account_link: Optional[resources.AnalyticsAccountLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (str): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + name (str): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where + this property is in. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MarketingplatformAdminServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MarketingplatformAdminServiceClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py new file mode 100644 index 000000000000..bed8bd431770 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +class ListAnalyticsAccountLinksPager: + """A pager for iterating through ``list_analytics_account_links`` requests. 
+ + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., marketingplatform_admin.ListAnalyticsAccountLinksResponse + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resources.AnalyticsAccountLink]: + for page in self.pages: + yield from page.analytics_account_links + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAnalyticsAccountLinksAsyncPager: + """A pager for iterating through ``list_analytics_account_links`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse] + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.AnalyticsAccountLink]: + async def async_generator(): + async for page in self.pages: + for response in page.analytics_account_links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py new file mode 100644 index 000000000000..205d647ea99a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
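For reference, a minimal sketch of how the synchronous pager defined above is typically consumed. This is illustrative only and not part of the generated sources: the organization ID is a placeholder and Application Default Credentials are assumed.

    from google.ads import marketingplatform_admin_v1alpha

    client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient()

    # Iterating the pager yields AnalyticsAccountLink messages across all pages;
    # further ListAnalyticsAccountLinks requests are sent only once the current
    # page is exhausted and a next_page_token is present.
    pager = client.list_analytics_account_links(parent="organizations/123")  # placeholder org ID
    for link in pager:
        print(link.name)

    # Use pager.pages instead to receive one ListAnalyticsAccountLinksResponse
    # per underlying request, e.g. when the raw next_page_token is needed.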
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport +from .grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .rest import ( + MarketingplatformAdminServiceRestInterceptor, + MarketingplatformAdminServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] +_transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MarketingplatformAdminServiceGrpcAsyncIOTransport +_transport_registry["rest"] = MarketingplatformAdminServiceRestTransport + +__all__ = ( + "MarketingplatformAdminServiceTransport", + "MarketingplatformAdminServiceGrpcTransport", + "MarketingplatformAdminServiceGrpcAsyncIOTransport", + "MarketingplatformAdminServiceRestTransport", + "MarketingplatformAdminServiceRestInterceptor", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py new file mode 100644 index 000000000000..6f70b5c211e1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py @@ -0,0 +1,232 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
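The registry above is what the client consults when its ``transport`` argument is a string rather than an instance. A minimal sketch of both selection styles, illustrative only and assuming Application Default Credentials are available:

    from google.ads import marketingplatform_admin_v1alpha
    from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports import (
        MarketingplatformAdminServiceGrpcTransport,
    )

    # Select a transport by registry key; "rest" switches the client from the
    # default gRPC transport to HTTP/JSON.
    rest_client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient(
        transport="rest",
    )

    # Alternatively, pass a pre-built transport instance. The instance resolves
    # its own credentials, so none may be supplied to the client in this case.
    grpc_transport = MarketingplatformAdminServiceGrpcTransport(
        host="marketingplatformadmin.googleapis.com",
    )
    grpc_client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient(
        transport=grpc_transport,
    )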
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MarketingplatformAdminServiceTransport(abc.ABC): + """Abstract transport class for MarketingplatformAdminService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ) + + DEFAULT_HOST: str = "marketingplatformadmin.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_organization: gapic_v1.method.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Union[resources.Organization, Awaitable[resources.Organization]], + ]: + raise NotImplementedError() + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Union[ + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Union[ + resources.AnalyticsAccountLink, Awaitable[resources.AnalyticsAccountLink] + ], + ]: + raise NotImplementedError() + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Union[ + marketingplatform_admin.SetPropertyServiceLevelResponse, + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MarketingplatformAdminServiceTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py new file mode 100644 index 000000000000..e8f3656c2d06 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport + + +class MarketingplatformAdminServiceGrpcTransport( + MarketingplatformAdminServiceTransport +): + """gRPC backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + ~.Organization]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + r"""Return a callable for the list analytics account links method over gRPC. + + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + ~.ListAnalyticsAccountLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + r"""Return a callable for the create analytics account link method over gRPC. 
+ + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + ~.AnalyticsAccountLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + r"""Return a callable for the set property service level method over gRPC. + + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + ~.SetPropertyServiceLevelResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
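+        # The created stub is cached in self._stubs, so repeated accesses of this
+        # property reuse the same callable instead of re-registering it on the channel.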
+ if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MarketingplatformAdminServiceGrpcTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..caef725e70be --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport + + +class MarketingplatformAdminServiceGrpcAsyncIOTransport( + MarketingplatformAdminServiceTransport +): + """gRPC AsyncIO backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
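+                # An explicit client_cert_source callback takes precedence; otherwise
+                # fall back to mTLS credentials derived from the application default.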
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Awaitable[resources.Organization], + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + Awaitable[~.Organization]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ]: + r"""Return a callable for the list analytics account links method over gRPC. + + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + Awaitable[~.ListAnalyticsAccountLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Awaitable[resources.AnalyticsAccountLink], + ]: + r"""Return a callable for the create analytics account link method over gRPC. + + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + Awaitable[~.AnalyticsAccountLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
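+        # Note: the response type is google.protobuf.Empty (a raw protobuf message),
+        # so the stub deserializes with Empty.FromString rather than a proto-plus
+        # ``deserialize`` helper.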
+ if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ]: + r"""Return a callable for the set property service level method over gRPC. + + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + Awaitable[~.SetPropertyServiceLevelResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_organization: gapic_v1.method_async.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method_async.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method_async.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method_async.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method_async.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MarketingplatformAdminServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py new file mode 100644 index 000000000000..b3894ca05fc4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py @@ -0,0 +1,858 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MarketingplatformAdminServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MarketingplatformAdminServiceRestInterceptor: + """Interceptor for MarketingplatformAdminService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MarketingplatformAdminServiceRestTransport. + + .. 
code-block:: python + class MyCustomMarketingplatformAdminServiceInterceptor(MarketingplatformAdminServiceRestInterceptor): + def pre_create_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_analytics_account_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_organization(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_organization(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_analytics_account_links(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_analytics_account_links(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_property_service_level(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_property_service_level(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MarketingplatformAdminServiceRestTransport(interceptor=MyCustomMarketingplatformAdminServiceInterceptor()) + client = MarketingplatformAdminServiceClient(transport=transport) + + + """ + + def pre_create_analytics_account_link( + self, + request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_create_analytics_account_link( + self, response: resources.AnalyticsAccountLink + ) -> resources.AnalyticsAccountLink: + """Post-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_delete_analytics_account_link( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def pre_get_organization( + self, + request: marketingplatform_admin.GetOrganizationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.GetOrganizationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_organization + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. 
+ """ + return request, metadata + + def post_get_organization( + self, response: resources.Organization + ) -> resources.Organization: + """Post-rpc interceptor for get_organization + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_list_analytics_account_links( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_list_analytics_account_links( + self, response: marketingplatform_admin.ListAnalyticsAccountLinksResponse + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + """Post-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_set_property_service_level( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.SetPropertyServiceLevelRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_set_property_service_level( + self, response: marketingplatform_admin.SetPropertyServiceLevelResponse + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + """Post-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MarketingplatformAdminServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MarketingplatformAdminServiceRestInterceptor + + +class MarketingplatformAdminServiceRestTransport( + MarketingplatformAdminServiceTransport +): + """REST backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MarketingplatformAdminServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'marketingplatformadmin.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = (
+            interceptor or MarketingplatformAdminServiceRestInterceptor()
+        )
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateAnalyticsAccountLink(MarketingplatformAdminServiceRestStub):
+        def __hash__(self):
+            return hash("CreateAnalyticsAccountLink")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> resources.AnalyticsAccountLink:
+            r"""Call the create analytics account
+        link method over HTTP.
+
+            Args:
+                request (~.marketingplatform_admin.CreateAnalyticsAccountLinkRequest):
+                    The request object. Request message for
+                CreateAnalyticsAccountLink RPC.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + "body": "analytics_account_link", + }, + ] + request, metadata = self._interceptor.pre_create_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.AnalyticsAccountLink() + pb_resp = resources.AnalyticsAccountLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_analytics_account_link(resp) + return resp + + class _DeleteAnalyticsAccountLink(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("DeleteAnalyticsAccountLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete analytics account + link method over HTTP. + + Args: + request (~.marketingplatform_admin.DeleteAnalyticsAccountLinkRequest): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=organizations/*/analyticsAccountLinks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetOrganization(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("GetOrganization") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.GetOrganizationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Call the get organization method over HTTP. + + Args: + request (~.marketingplatform_admin.GetOrganizationRequest): + The request object. Request message for GetOrganization + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Organization: + A resource message representing a + Google Marketing Platform organization. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=organizations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_organization( + request, metadata + ) + pb_request = marketingplatform_admin.GetOrganizationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Organization() + pb_resp = resources.Organization.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_organization(resp) + return resp + + class _ListAnalyticsAccountLinks(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("ListAnalyticsAccountLinks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + r"""Call the list analytics account + links method over HTTP. + + Args: + request (~.marketingplatform_admin.ListAnalyticsAccountLinksRequest): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.ListAnalyticsAccountLinksResponse: + Response message for + ListAnalyticsAccountLinks RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + }, + ] + request, metadata = self._interceptor.pre_list_analytics_account_links( + request, metadata + ) + pb_request = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + pb_resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_analytics_account_links(resp) + return resp + + class _SetPropertyServiceLevel(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("SetPropertyServiceLevel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Call the set property service + level method over HTTP. + + Args: + request (~.marketingplatform_admin.SetPropertyServiceLevelRequest): + The request object. Request message for + SetPropertyServiceLevel RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_property_service_level( + request, metadata + ) + pb_request = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.SetPropertyServiceLevelResponse() + pb_resp = marketingplatform_admin.SetPropertyServiceLevelResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_property_service_level(resp) + return resp + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOrganization(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAnalyticsAccountLinks(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetPropertyServiceLevel(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MarketingplatformAdminServiceRestTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py new file mode 100644 index 000000000000..617c3bec15b6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py new file mode 100644 index 000000000000..a446e0c57b69 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import resources + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "AnalyticsServiceLevel", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + }, +) + + +class AnalyticsServiceLevel(proto.Enum): + r"""Various levels of service for Google Analytics. + + Values: + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED (0): + Service level unspecified. + ANALYTICS_SERVICE_LEVEL_STANDARD (1): + The standard version of Google Analytics. + ANALYTICS_SERVICE_LEVEL_360 (2): + The premium version of Google Analytics. + """ + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED = 0 + ANALYTICS_SERVICE_LEVEL_STANDARD = 1 + ANALYTICS_SERVICE_LEVEL_360 = 2 + + +class GetOrganizationRequest(proto.Message): + r"""Request message for GetOrganization RPC. + + Attributes: + name (str): + Required. The name of the Organization to retrieve. Format: + organizations/{org_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAnalyticsAccountLinksRequest(proto.Message): + r"""Request message for ListAnalyticsAccountLinks RPC. + + Attributes: + parent (str): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + page_size (int): + Optional. The maximum number of Analytics + account links to return in one call. The service + may return fewer than this value. + + If unspecified, at most 50 Analytics account + links will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ListAnalyticsAccountLinks call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAnalyticsAccountLinks`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAnalyticsAccountLinksResponse(proto.Message): + r"""Response message for ListAnalyticsAccountLinks RPC. + + Attributes: + analytics_account_links (MutableSequence[google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink]): + Analytics account links in this organization. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + analytics_account_links: MutableSequence[ + resources.AnalyticsAccountLink + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.AnalyticsAccountLink, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for CreateAnalyticsAccountLink RPC. + + Attributes: + parent (str): + Required. The parent resource where this Analytics account + link will be created. 
Format: organizations/{org_id} + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_account_link: resources.AnalyticsAccountLink = proto.Field( + proto.MESSAGE, + number=2, + message=resources.AnalyticsAccountLink, + ) + + +class DeleteAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for DeleteAnalyticsAccountLink RPC. + + Attributes: + name (str): + Required. The name of the Analytics account link to delete. + Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SetPropertyServiceLevelRequest(proto.Message): + r"""Request message for SetPropertyServiceLevel RPC. + + Attributes: + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where this + property is in. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + analytics_property (str): + Required. The Analytics property to change the ServiceLevel + setting. This field is the name of the Google Analytics + Admin API property resource. + + Format: + analyticsadmin.googleapis.com/properties/{property_id} + service_level (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsServiceLevel): + Required. The service level to set for this + property. + """ + + analytics_account_link: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_property: str = proto.Field( + proto.STRING, + number=2, + ) + service_level: "AnalyticsServiceLevel" = proto.Field( + proto.ENUM, + number=3, + enum="AnalyticsServiceLevel", + ) + + +class SetPropertyServiceLevelResponse(proto.Message): + r"""Response message for SetPropertyServiceLevel RPC.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py new file mode 100644 index 000000000000..420e17747cf7 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "LinkVerificationState", + "Organization", + "AnalyticsAccountLink", + }, +) + + +class LinkVerificationState(proto.Enum): + r"""The verification state of the link between a product account + and a GMP organization. + + Values: + LINK_VERIFICATION_STATE_UNSPECIFIED (0): + The link state is unknown. + LINK_VERIFICATION_STATE_VERIFIED (1): + The link is established. 
+        LINK_VERIFICATION_STATE_NOT_VERIFIED (2):
+            The link is requested, but hasn't been
+            approved by the product account admin.
+    """
+    LINK_VERIFICATION_STATE_UNSPECIFIED = 0
+    LINK_VERIFICATION_STATE_VERIFIED = 1
+    LINK_VERIFICATION_STATE_NOT_VERIFIED = 2
+
+
+class Organization(proto.Message):
+    r"""A resource message representing a Google Marketing Platform
+    organization.
+
+    Attributes:
+        name (str):
+            Identifier. The resource name of the GMP organization.
+            Format: organizations/{org_id}
+        display_name (str):
+            The human-readable name for the organization.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class AnalyticsAccountLink(proto.Message):
+    r"""A resource message representing the link between a Google
+    Analytics account and a Google Marketing Platform organization.
+
+    Attributes:
+        name (str):
+            Identifier. Resource name of this AnalyticsAccountLink. Note
+            the resource ID is the same as the ID of the Analytics
+            account.
+
+            Format:
+            organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id}
+            Example: "organizations/xyz/analyticsAccountLinks/1234".
+        analytics_account (str):
+            Required. Immutable. The resource name of the AnalyticsAdmin
+            API account. The account ID will be used as the ID of this
+            AnalyticsAccountLink resource, which will become the final
+            component of the resource name.
+
+            Format: analyticsadmin.googleapis.com/accounts/{account_id}
+        display_name (str):
+            Output only. The human-readable name for the
+            Analytics account.
+        link_verification_state (google.ads.marketingplatform_admin_v1alpha.types.LinkVerificationState):
+            Output only. The verification state of the
+            link between the Analytics account and the
+            parent organization.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    analytics_account: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    link_verification_state: "LinkVerificationState" = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum="LinkVerificationState",
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-ads-marketingplatform-admin/mypy.ini b/packages/google-ads-marketingplatform-admin/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-ads-marketingplatform-admin/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-ads-marketingplatform-admin/noxfile.py b/packages/google-ads-marketingplatform-admin/noxfile.py
new file mode 100644
index 000000000000..67b7265f7586
--- /dev/null
+++ b/packages/google-ads-marketingplatform-admin/noxfile.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
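+    # The regex below captures only the package name (group 1) from pinned
+    # "name==version" constraint entries; the "(?===\S+)" lookahead means any
+    # line whose first token is not followed by "==" (blank lines, comments,
+    # unpinned requirements) simply produces no match.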
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py new file mode 100644 index 000000000000..bfd28a483b92 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = await client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py new file mode 100644 index 000000000000..6af1b08a5a3b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py new file mode 100644 index 000000000000..c0b2c7e1ffa9 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py new file mode 100644 index 000000000000..8f1a794eacac --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py new file mode 100644 index 000000000000..7666fa53e916 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py new file mode 100644 index 000000000000..52b506c61914 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py new file mode 100644 index 000000000000..3837010ff87f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py new file mode 100644 index 000000000000..af3ed458056a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py new file mode 100644 index 000000000000..b07e73cde9f5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py new file mode 100644 index 000000000000..a742b4f50f64 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json new file mode 100644 index 000000000000..72abc6186c7b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json @@ -0,0 +1,822 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.marketingplatform.admin.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-ads-marketingplatform-admin", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.create_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.create_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": 
"GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + 
"fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync", + "segments": [ + { + "end": 52, + "start": 27, + 
"type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py" + } + ] +} diff --git a/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py new file mode 100644 index 000000000000..eb6dc67078de --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py @@ -0,0 +1,180 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class marketingplatform_adminCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_analytics_account_link': ('parent', 'analytics_account_link', ), + 'delete_analytics_account_link': ('name', ), + 'get_organization': ('name', ), + 'list_analytics_account_links': ('parent', 'page_size', 'page_token', ), + 'set_property_service_level': ('analytics_account_link', 'analytics_property', 'service_level', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=marketingplatform_adminCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the marketingplatform_admin client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-ads-marketingplatform-admin/setup.py b/packages/google-ads-marketingplatform-admin/setup.py new file mode 100644 index 000000000000..bd6f637c0bf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
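The fixup script added above rewrites flattened, positional GAPIC calls into the single request-dict form while keeping the control parameters (retry, timeout, metadata) as keyword arguments. A hypothetical before/after, illustrative only and not part of the patch; "client" is a stand-in object so the snippet is self-contained:

# "_FakeClient" stands in for MarketingplatformAdminServiceClient purely so this
# example runs on its own.
class _FakeClient:
    def get_organization(self, request=None, retry=None, timeout=None, metadata=()):
        return request

client = _FakeClient()

# Before running the fixup script, older sample code may call the method with
# flattened positional arguments:
#     client.get_organization("organizations/ORGANIZATION_ID")
# After the fixup, the same call is expressed with a single request dict, and
# control parameters stay as keywords:
response = client.get_organization(request={"name": "organizations/ORGANIZATION_ID"})
print(response)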
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-ads-marketingplatform-admin" + + +description = "Google Ads Marketingplatform Admin API client library" + +version = None + +with open( + os.path.join(package_root, "google/ads/marketingplatform_admin/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-ads-marketingplatform-admin/testing/.gitignore b/packages/google-ads-marketingplatform-admin/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
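The setup.py above derives the package version by scanning gapic_version.py for a single double-quoted dotted version string. A standalone sketch of that extraction; the sample line is hypothetical but mirrors the shape of the generated gapic_version.py:

import re

# A line shaped like the one in the generated gapic_version.py file.
sample = '__version__ = "0.1.0"\n'

# Same pattern as in setup.py: a dotted numeric version between double quotes.
version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", sample)
assert version_candidates == ["0.1.0"]
print(version_candidates[0])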
+google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/tests/__init__.py b/packages/google-ads-marketingplatform-admin/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
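The unit tests added below drive the generated MarketingplatformAdminServiceClient entirely through mocks. For orientation, a minimal usage sketch of the flattened methods catalogued in the snippet metadata earlier in this patch; this is illustrative only, the resource names are placeholders, and it assumes Application Default Credentials with access to the Marketing Platform Admin API:

from google.ads import marketingplatform_admin_v1alpha

# Client construction picks up Application Default Credentials.
client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient()

# GetOrganization: flattened "name" argument, returns an Organization message.
org = client.get_organization(name="organizations/ORGANIZATION_ID")
print(org.name)

# ListAnalyticsAccountLinks: flattened "parent" argument, returns a pager that
# iterates transparently across pages.
for link in client.list_analytics_account_links(parent="organizations/ORGANIZATION_ID"):
    print(link.name)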
+# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py new file mode 100644 index 000000000000..c0d88b7ea387 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py @@ -0,0 +1,5969 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, + pagers, + transports, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MarketingplatformAdminServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, False) is None + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + 
assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain(None, None) + == MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, "grpc"), + (transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MarketingplatformAdminServiceRestTransport, "rest"), + ], +) +def test_marketingplatform_admin_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com/" + ) + + +def test_marketingplatform_admin_service_client_get_transport_class(): + transport = MarketingplatformAdminServiceClient.get_transport_class() + available_transports = [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceRestTransport, + ] + assert transport in available_transports + + transport = MarketingplatformAdminServiceClient.get_transport_class("grpc") + assert transport == transports.MarketingplatformAdminServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + 
MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "true", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "false", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_marketingplatform_admin_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
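For context on what the "never" case exercised below means for end users: GOOGLE_API_USE_MTLS_ENDPOINT=never pins the client to the plain (non-mTLS) service endpoint. A minimal sketch, illustrative only and not part of the generated tests; anonymous credentials are used so no real credentials are needed and no RPC is made:

import os

from google.auth import credentials as ga_credentials

from google.ads.marketingplatform_admin_v1alpha import (
    MarketingplatformAdminServiceClient,
)

# Opt out of the mTLS endpoint regardless of any client certificate settings.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"

client = MarketingplatformAdminServiceClient(
    credentials=ga_credentials.AnonymousCredentials()
)
# Expected to be the plain service endpoint, e.g.
# "marketingplatformadmin.googleapis.com".
print(client.api_endpoint)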
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
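+    # Note: ClientOptions.universe_domain may be absent on older versions of
+    # google-api-core; the hasattr() check below keeps the test compatible with
+    # both and falls back to the default universe when the attribute is missing.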
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + None, + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_marketingplatform_admin_service_client_client_options_from_dict(): + with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MarketingplatformAdminServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=None, + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + response = client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +def test_get_organization_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_organization(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + +def test_get_organization_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. 
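+        # Wrap the response in FakeUnaryUnaryCall so the mocked stub call is
+        # awaitable, mirroring a real async gRPC unary-unary call.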
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +@pytest.mark.asyncio +async def test_get_organization_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_organization + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_organization + ] = mock_rpc + + request = {} + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.GetOrganizationRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_organization_async_from_dict(): + await test_get_organization_async(request_type=dict) + + +def test_get_organization_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = resources.Organization() + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_organization_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_organization_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_organization_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_organization_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_organization_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +def test_list_analytics_account_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_analytics_account_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_analytics_account_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_analytics_account_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAnalyticsAccountLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_from_dict(): + await test_list_analytics_account_links_async(request_type=dict) + + +def test_list_analytics_account_links_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_analytics_account_links_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_analytics_account_links_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_pager(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_analytics_account_links( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + +def test_list_analytics_account_links_pages(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. 
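+        # The trailing RuntimeError makes the mocked call fail loudly if the
+        # pager ever requests a page beyond the final response (whose
+        # next_page_token is empty).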
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_analytics_account_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pager(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_analytics_account_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pages(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_analytics_account_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + response = client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + +def test_create_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + +def test_create_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_analytics_account_link + ] = mock_rpc + request = {} + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.create_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_analytics_account_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_analytics_account_link + ] = mock_rpc + + request = {} + await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async_from_dict(): + await test_create_analytics_account_link_async(request_type=dict) + + +def test_create_analytics_account_link_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value = resources.AnalyticsAccountLink() + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +def test_create_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_analytics_account_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + +def test_delete_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + +def test_delete_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_analytics_account_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_from_dict(): + await test_delete_analytics_account_link_async(request_type=dict) + + +def test_delete_analytics_account_link_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = None + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + response = client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.set_property_service_level() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest() + + +def test_set_property_service_level_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_property_service_level(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + +def test_set_property_service_level_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + response = await client.set_property_service_level() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest() + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.set_property_service_level + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_property_service_level + ] = mock_rpc + + request = {} + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + response = await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_from_dict(): + await test_set_property_service_level_async(request_type=dict) + + +def test_set_property_service_level_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_property_service_level_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +def test_set_property_service_level_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +def test_set_property_service_level_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_organization(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_organization_rest_required_fields( + request_type=marketingplatform_admin.GetOrganizationRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_organization(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_organization_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_organization._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_organization_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, "post_get_organization" + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, "pre_get_organization" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.GetOrganizationRequest.pb( + marketingplatform_admin.GetOrganizationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Organization.to_json( + resources.Organization() + ) + + request = marketingplatform_admin.GetOrganizationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Organization() + + client.get_organization( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_organization_rest_bad_request( + transport: str = "rest", request_type=marketingplatform_admin.GetOrganizationRequest +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_organization(request) + + +def test_get_organization_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_organization(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*}" % client.transport._host, args[1] + ) + + +def test_get_organization_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +def test_get_organization_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_analytics_account_links(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_analytics_account_links_rest_required_fields( + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_analytics_account_links(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_analytics_account_links_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_analytics_account_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_analytics_account_links_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_list_analytics_account_links", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_list_analytics_account_links", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + marketingplatform_admin.ListAnalyticsAccountLinksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + 
marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + ) + + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + client.list_analytics_account_links( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_analytics_account_links_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_analytics_account_links(request) + + +def test_list_analytics_account_links_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_analytics_account_links(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_list_analytics_account_links_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_rest_pager(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_analytics_account_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + pages = list(client.list_analytics_account_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request_init["analytics_account_link"] = { + "name": "name_value", + "analytics_account": "analytics_account_value", + "display_name": "display_name_value", + "link_verification_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.meta.fields[ + "analytics_account_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "analytics_account_link" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["analytics_account_link"][field])): + del request_init["analytics_account_link"][field][i][subfield] + else: + del request_init["analytics_account_link"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_analytics_account_link(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_analytics_account_link + ] = mock_rpc + + request = {} + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "analyticsAccountLink", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_create_analytics_account_link", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_create_analytics_account_link", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.AnalyticsAccountLink.to_json( + resources.AnalyticsAccountLink() + ) + + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.AnalyticsAccountLink() + + client.create_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_analytics_account_link(request) + + +def test_create_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_create_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +def test_create_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_analytics_account_link(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_delete_analytics_account_link", + ) as pre: + pre.assert_not_called() + pb_message = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_analytics_account_link(request) + + +def test_delete_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*/analyticsAccountLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +def test_delete_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_property_service_level(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_property_service_level_rest_required_fields( + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["analytics_account_link"] = "" + request_init["analytics_property"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["analyticsAccountLink"] = "analytics_account_link_value" + jsonified_request["analyticsProperty"] = "analytics_property_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "analyticsAccountLink" in jsonified_request + assert jsonified_request["analyticsAccountLink"] == "analytics_account_link_value" + assert "analyticsProperty" in jsonified_request + assert jsonified_request["analyticsProperty"] == "analytics_property_value" + + client = MarketingplatformAdminServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_property_service_level(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_property_service_level_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_property_service_level._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "analyticsAccountLink", + "analyticsProperty", + "serviceLevel", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_property_service_level_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_set_property_service_level", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_set_property_service_level", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + marketingplatform_admin.SetPropertyServiceLevelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + marketingplatform_admin.SetPropertyServiceLevelResponse.to_json( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + ) + + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value 
= request, metadata + post.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + client.set_property_service_level( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_property_service_level_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_property_service_level(request) + + +def test_set_property_service_level_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + + # get truthy value for each flattened field + mock_args = dict( + analytics_account_link="analytics_account_link_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_property_service_level(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel" + % client.transport._host, + args[1], + ) + + +def test_set_property_service_level_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +def test_set_property_service_level_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MarketingplatformAdminServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MarketingplatformAdminServiceGrpcTransport, + ) + + +def test_marketingplatform_admin_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_marketingplatform_admin_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_organization", + "list_analytics_account_links", + "create_analytics_account_link", + "delete_analytics_account_link", + "set_property_service_level", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_marketingplatform_admin_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +def test_marketingplatform_admin_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport() + adc.assert_called_once() + + +def test_marketingplatform_admin_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MarketingplatformAdminServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com/" + api_audience_tests = [None, "https://language2.com/"] + api_audience_expect = [host, "https://language2.com/"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, grpc_helpers), + ( + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=["1", "2"], + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_marketingplatform_admin_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MarketingplatformAdminServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_no_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_with_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_marketingplatform_admin_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MarketingplatformAdminServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MarketingplatformAdminServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_organization._session + session2 = client2.transport.get_organization._session + assert session1 != session2 + session1 = client1.transport.list_analytics_account_links._session + session2 = client2.transport.list_analytics_account_links._session + assert session1 != session2 + session1 = client1.transport.create_analytics_account_link._session + session2 = client2.transport.create_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.delete_analytics_account_link._session + session2 = client2.transport.delete_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.set_property_service_level._session + session2 = client2.transport.set_property_service_level._session + assert session1 != session2 + + +def test_marketingplatform_admin_service_grpc_transport_channel(): + channel = 
grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_marketingplatform_admin_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = MarketingplatformAdminServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = MarketingplatformAdminServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_account_path(path) + assert expected == actual + + +def test_analytics_account_link_path(): + organization = "whelk" + analytics_account_link = "octopus" + expected = "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, + ) + actual = MarketingplatformAdminServiceClient.analytics_account_link_path( + organization, analytics_account_link + ) + assert expected == actual + + +def test_parse_analytics_account_link_path(): + expected = { + "organization": "oyster", + "analytics_account_link": "nudibranch", + } + path = MarketingplatformAdminServiceClient.analytics_account_link_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_analytics_account_link_path(path) + assert expected == actual + + +def test_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.organization_path(organization) + assert expected == actual + + +def test_parse_organization_path(): + expected = { + "organization": "mussel", + } + path = MarketingplatformAdminServiceClient.organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_organization_path(path) + assert expected == actual + + +def test_property_path(): + property = "winkle" + expected = "properties/{property}".format( + property=property, + ) + actual = MarketingplatformAdminServiceClient.property_path(property) + assert expected == actual + + +def test_parse_property_path(): + expected = { + "property": "nautilus", + } + path = MarketingplatformAdminServiceClient.property_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_property_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MarketingplatformAdminServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = MarketingplatformAdminServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MarketingplatformAdminServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = MarketingplatformAdminServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = MarketingplatformAdminServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = MarketingplatformAdminServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = MarketingplatformAdminServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MarketingplatformAdminServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = MarketingplatformAdminServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MarketingplatformAdminServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From c365adb60deda36cb8227fa2eaccc522bdd408fd Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Thu, 5 Sep 2024 14:36:48 -0700 Subject: [PATCH 053/108] chore: Update release-please config files (#13061) Update release-please config files --- .release-please-manifest.json | 1 + release-please-config.json | 15 +++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 4fe1785f9ff1..146cef793774 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,5 +1,6 @@ { "packages/google-ads-admanager": "0.1.2", + "packages/google-ads-marketingplatform-admin": "0.0.0", "packages/google-ai-generativelanguage": "0.6.9", "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", diff --git a/release-please-config.json b/release-please-config.json index ea9a89e5e9fc..b2f26908453f 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -15,6 +15,21 @@ ], "release-type": "python" }, + "packages/google-ads-marketingplatform-admin": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-ads-marketingplatform-admin", + "extra-files": [ + "google/ads/marketingplatform_admin/gapic_version.py", + "google/ads/marketingplatform_admin_v1alpha/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-ai-generativelanguage": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, From b76759921d8bedf8edfbf2a97fb6a614fbc5c1ce Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 17:45:04 -0400 Subject: [PATCH 054/108] chore: release main (#13062) :robot: I have created a release *beep* *boop* ---
google-ads-marketingplatform-admin: 0.1.0

## 0.1.0 (2024-09-05)

### Features

* add initial files for google.marketingplatform.admin.v1alpha ([#13060](https://github.com/googleapis/google-cloud-python/issues/13060)) ([2bbab3b](https://github.com/googleapis/google-cloud-python/commit/2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- .../google-ads-marketingplatform-admin/CHANGELOG.md | 11 ++++++++++- .../ads/marketingplatform_admin/gapic_version.py | 2 +- .../marketingplatform_admin_v1alpha/gapic_version.py | 2 +- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 146cef793774..477ae9480c55 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,6 +1,6 @@ { "packages/google-ads-admanager": "0.1.2", - "packages/google-ads-marketingplatform-admin": "0.0.0", + "packages/google-ads-marketingplatform-admin": "0.1.0", "packages/google-ai-generativelanguage": "0.6.9", "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md index 5ddad421e08f..f8676c0292af 100644 --- a/packages/google-ads-marketingplatform-admin/CHANGELOG.md +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -1 +1,10 @@ -# Changelog \ No newline at end of file +# Changelog + +## 0.1.0 (2024-09-05) + + +### Features + +* add initial files for google.marketingplatform.admin.v1alpha ([#13060](https://github.com/googleapis/google-cloud-python/issues/13060)) ([2bbab3b](https://github.com/googleapis/google-cloud-python/commit/2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487)) + +## Changelog diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} From 0ee300a0497968aa2c85969924b37f95f67675f0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 19:42:21 -0400 Subject: [PATCH 055/108] feat: [google-apps-chat] Add CHAT_SPACE link type support for GA launch (#13064) - [ ] Regenerate this pull request now. 
From 0ee300a0497968aa2c85969924b37f95f67675f0 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Thu, 5 Sep 2024 19:42:21 -0400
Subject: [PATCH 055/108] feat: [google-apps-chat] Add CHAT_SPACE link type support for GA launch (#13064)

- [ ] Regenerate this pull request now.

PiperOrigin-RevId: 671436186
Source-Link: https://github.com/googleapis/googleapis/commit/f21743b4fe99a37e86522823454a67203113b43a
Source-Link: https://github.com/googleapis/googleapis-gen/commit/01202948aeacf502f63d3d01995521589e4c6db4
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2hhdC8uT3dsQm90LnlhbWwiLCJoIjoiMDEyMDI5NDhhZWFjZjUwMmY2M2QzZDAxOTk1NTIxNTg5ZTRjNmRiNCJ9
---------
Co-authored-by: Owl Bot
---
 packages/google-apps-chat/chat-v1-py.tar.gz   | Bin 0 -> 131072 bytes
 .../google/apps/chat/__init__.py              |   2 +
 .../google/apps/chat_v1/__init__.py           |   2 +
 .../google/apps/chat_v1/types/__init__.py     |   2 +
 .../google/apps/chat_v1/types/annotation.py   |  52 ++++++++++++++++++
 .../unit/gapic/chat_v1/test_chat_service.py   |  10 ++++
 6 files changed, 68 insertions(+)

diff --git a/packages/google-apps-chat/chat-v1-py.tar.gz b/packages/google-apps-chat/chat-v1-py.tar.gz
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..496d03ec6ccfe68f0cfb0a0dc0d0d826791ef358 100644
GIT binary patch
literal 131072
[binary payload omitted]
z$_oRi2v%uJW;sz)X(CWZg+o=_3ONiH&uE89KS7p#L&_fJ=op(#xf=k=Ju;C4XE01v z(RQUBG!>r@2OqvaI63|C@MD$OsrWeYQ!!zV5k3{4|2RH8!WQi4srbkX14tM8eYPr+ zC9)K!J?r2CACx-dUOb|>83q&8OC28l{qPLCwnL-ZUE7;g8^O7?JMjVG6=%vff1H`U z9vz=`b~_B3!Ro7`uKP(uEuBXc8C9RVbFz+)D1Q~+U`DA?H0+atiNhDhb2$U2wgn-9 zxA+vUy@<7n4_{0XYE_3a##;5lU?+I>%)3KS;>zD3h62O$Yj{N{T@Y&r9}+EzZZs4Q zPTKZhh&JWcI5~b_e`mb2a*N>enU40pd^Yu4T{LAo{L8jXu7gV-It`ini$YtW1##qN zSyVoIQmx@b48CwQx+KBJ^kVEb6b^2~yHu$ZWcp(Ac*0CKz9m6|>@;Pp(3-i)Xc@#F z^HG*vA6&E>N~nO+wFsBCD2q@l7{p&uA7cAVrGHNHNDe-SEaw@ys0>lM>x;y5T6Ak@ z8zjl`ot?7-pDRkJ#Np)ljUEAgK<2oF|SjX0sQHN!>L3G0xDyi;D_Cb+B z`yGrI$cszmKago?!nQg4@K_0xaD@N77ZU_ zK(lR&E>F?p+uR)8%+Yj(JmyUW$68vNGFDh1@o+#(dgSCvn#CjC zdzF`wHRUoG%Cj-@i0 zo0);I#c$Q9jC4X|%;;{loyravI32o)V>RlNq)K-4LMToMYndX?G;Qdqe64ONU*Nfb z5Bg{Oj0at`yg}s>dmOuf$L(;;`o>;4_BWa1^CP>nxBKys&N8%N=<^Lcx)lq>gsC>5 zvX!hbzOWjHpELm=Q+Ob3>Z|)aLm>9_SKbCoT8LU|xBGJI)$8xxy#2!;aq;d%qQ$-` zI?JIN>g4&IOhM;vzH3|Q5U51x#<0dSCMN?fE2)jL+BUL`JZek9#z7Y=X9HQ?+M5Zf zp4?F0O=LCj{qb4;cG6%#?Jm+iQcYspIMAHmjV|YuRB_atbI@8Gb%Jfw;HZmYS$GeI zro(j$&OlUFc>9h@mldbeZe&3~7ug#vhp|?~HJh4;={`D8DNeDt69TjNmLV)_h_pSe3rm{VlSy~A%&)4nZC9^0Q_bBZflg{NGm_uN*)nQlab$BZQrq`HUALqr&90B4OJ<$nG=Wb%9G1HG%Y~_MWmCU(*|M)) z8g=9qOm??1VJ_@C9vgQr&ALGHuJQcb@%^rO3sd@Os3uc+#nQwY?RsX}l}uB{GOuS^ z7c!-RP}RIZ<8v zb9*2uwjJyXAi!3G-5A|&n>pEeM>fs3`%>xJyWM};(Qc0x4V~^ilC@tfopV7`YbUoE z?&V1hq4H(5V_asnhI1gU0&BCui&O-)6>%k4|M)NP6+pLQ-5rfJ?^rrCCMBRlR`E_q z@u8Voh6{!JeLQi_Xz}645Y!c$Valu&=jyuFLa%#{nKgGsM?kuqW_}>VbuOfNp-9)% zyv;hD`v5)nYmE7Ww7yX9mB8c9ki*9m9u|8A4V=z)uWGSrYUX z+v`joNKHvb#>cTNXXI<(J0K+cbmg~T%6m|*Y%ghbglNf3WZc zHo?ogEdqyiYSiY3{a(lz3txM&IrJDoZ7p=5b6Ub2Wtwx1lz9%C@{Dp(jLVIfQ}{PF zS9hmx>^tKaR*~K90C#0^liwd7zjqpZ<21~^v885UsDcW$5{ zc|F;@l6=7~B$E^evH{8m!$i|(E#2@kn&XvZ4Sb$U$(l`V0oRgqEOv{#nC!b-Q)+XA z1--;8BD#4CFMpJ7f%5CgC3h}#TypiPOXgXF=Ov(%C$~erX7~J#>t^@QUQD(uc8dro zIJo0R=$s28{Pe_YI;?Q3#8r5U`tjI^j5`PY2fA}0fF`tA?$}7Dtu!Rm(&I4YCxL4E zZ5wV%Ma+c+FY(t)Y@Hq2S$1A_{_81+f|xu876`1}1aTm@n26hE;DM>OICEYxce&<} zzu;ZY;}N(pLl#Mxz{jWj1=1J%06HF9n?EgR2jp1Z5)-@e{R__#g0Ur-vpyZyvHo@E zd@0D5pSX(CR!=;z@_o(R6WY>BVP+1=hxhXDvUnDAJfqw_mlnCv)UDb<;sgBoap zLM%8aM8-@mW7x~#=&H!cPG9Wy8C3fyYFYZvew>a5A^Htu%<3f?kGY=>!-r`ufn#o2 zUDYLAsXXYzYlWH4V28Z1`494Ibk||NJyi$#x9Ezz?cC`vYK%s zjTwAT^=(V;E;A}a*S!69@ICJchlU4MT{ftI4T$;V4+o|*CT?A}^U_FqZZcOLwt;;Y zuMc}NQ=8{9DZ4H}L({IA^(Y;QEa$4o>$@4nN|sV0v$-Zs*{`?%{hF z$@Hjp@b`nGv(CqZlXu4_ALy#+tZkhh?4Ch0(_`+t_lH0Hc-8?i@9^)tIJ@cbmp8S4 z@NxI#?9&NusOgdU$K$gD7|!&gp$0l9$HyN!XaD$!gPDHi*|9Y=G5zlPu=}y|@$kqW zdZ1zx@|G;D`1IT1!>9LWhacY`03II0KQOC5Ox@P;yLS{$VR|%%&2UO2VtN$*_~S8A z@980Xa&UUK`w0~tPOzp&mD3-OKf*fy&;e=;J9B!_!iWH}({G^Dvt!(XTho)?v%|f= z9l#u>N42w)T}1h<=~w3Ohx-S|085~jyU@_|;A9N}qyGmqGu<}UZ>&24h<;67N)-AX zbtxk31Y2*^Tj2Td;yd{QnfOldNA)*xWj%^VE}MnkR4L9k;oWQXPGf}7;#>KFaW*Kc z=3uibC~tox-UIK|94%%74Zg9zDeeGY)wCJ8-wEEBZ#4Q1#npg0L56nkBq26Git(|NBRhc_vPl?r0@WYw^qt^4?Zq*{ zXG<+7--|nengiQhP)hZ0>=yX)2kQ%=KfM2=9{I&kD!ujue3(r^X_cr6K*cscp`bWD zsCP;^o}K*Ay^46q&Ng(kp9vR0v*plAeGGLwR? ziO!M5$f`Pm?0TxTkTxeWq5cBMVBPJQ57BdJPZlh(E}B?$%iVcZ<;jVLFzp6DF1Vuq z;yi${h6jISRT z`<^LN+JQZLWMn~Yxs({4rhQRSMWHiTCbF3N#+77z7S2uakx~;KZ0t!*gPtXh{n%YH?Qpf8Ukf?0kSfR)o7N*WYBCsS))^*9WxLm+%@| zX~u#=rQwkB!^r#Z+w)wG@}dR8&9Zw%NC_ERbC=Oi*~u-WyUt2e-wf21)!ITTv6y{{ zMAqWfflSDlfSV154dX=33SLfKZN4{u}6!3_7t*#WZJ(Zm-Tr?)I(9IJWCJq`2fWq>Vn(XaXAKH-6k%0FYEMU@*Y8H@t4o$>Ke_! 
zQqF#|TBL3cg}&SALsU_$Xm)TPuU4lI+rBr^2tN@!nx9lSM3nySJ_N|Q4Lan=qNjUc z=m6dGU6Zq=rg%qH_Eu)eKf^)E==Ck_7pS&}L~-uu*Djv*$eDULrRs`cWIt6IS{yXc zn0_P3&T2#nP$h#ybSKB#CegbSd!c}p9jAyRXom{>3cz#)!L9*tWSg3C0Wt2_p+pAM zU?j{e7b)_ICUSNdC93TN;+ATtIj*EA_ZEF9iJGz%<08U+&xifQe5yez8X^oI<@>oTF``;P~y_r2r{P2cXA+a6?2(oG+(}c}u<^z?4K`C@lwF z$$6G76q;rGI4Lw=>^TXK3s)SSSANQEY+OUD!S2o2L7ey_i*4Di;+(+M{_=ekWW;%jxF8LxR3f*f+BHt?3K=DtrjX%b@AXX>7rU~f z;oG_1TIu;p(9kUyFG48er>C_tZ~;3ylem>oUnZc|w=!ceUq{T8B&YAjOHW^4*E=!G zvqRZ?ds;4gXfVadTcI1osUGh{>VWPRPC&d3j_$OUb^KkblZ4z87mq ze6fN&>-l5t3Ve1o$>&49byJ?bKM5z6-0*xhzC##*{0ReYL6j*fk~` zS*;7?w&WhmKM^`|`r0j@GG`QOp{LvT_198-L{-a6O zEaMa`h?CN30KMU1d;SRy7K3#$xLsRl3BH;AAgK@!lpQ`vx?06{dj{iF-W;Fy0{p80 z_9aYHOzP%lXqL~oD8k$arv+pGt}gA#dy=HHNUg;-&<63U{tCQGB(Y+4lGJv_zB@=@ zIxtE)I&u$9-}Eio`cBNLaX9$AeMsz61&EM`ie4N7!j`fx3C$F3te{9Ecqrb<8|6H_ zmlG!F1UNFl90-~*#Zns-RtX*(r&1-hmt#N85L;~F{8o0c_2h*CL37(|)KOM9)rm%q z>0#_>Mk-Fl+AJkv6uA)C%JMWqtK?K#FXvE6&*HJjunvi+iap;%o_6P9Ix%Tk%(lvv z+ha<*C&A4>gPIu8rUsi#+Q^|jftqsjemgg~$>U{S1#><;$tbOVkMHfkbKSu0oPZc{ z9~32Dk@=8op`hi;{mzLu*Z7xi0+zho99$1!Y>R)h;>!l*L&u2=lJVY$nMHYDqu9WC zKckqjV9tc}PEb7mrm7v}I!PSlvyTsLw-PIV(Km+D`&TvKsp%k{?h` zKvF{tLvX6$h^c9rHuE-Q7K$pt!sW0tS{Gk}b1fy>S_`3kOSudKT>~RT1o9mp1}0|a zBt3mV;EP-LcEpcgCE|IZo$yUV58kpEra`U8w1^y7A0qgu-Hyryv$%pR>kj|7;0uG)OV)fBFQo9IK|EW5-c{N zlbFI}y7#wU-^oyllOWr!fO0fkfpdPTZlfia>2AvBY&L4*F3E_f0!XK2Ed_Auo=6tZ zFmWEzF~Eg2h!T9oca?^JK&vp)GlI z%y2<0@=j*w--V+{dFC^M{t%i7$!>WdtrJ^8)*8=@c*!)VMitJ5)NUGmHD8YKfaQtb zf@QD&LvOX3o!-Wqo^Fr#TMOq;XIv@gkJZdi&deKrbNZyqApyx&je3A|avp&8Ggq>l zGvi4#5{sEB3C~rO0}=%Bn?GLt_^ohqNGkYrlu|rU&2ysFX~oTlFHix-yXHqv0$DWv zOEo;jMs@*)sSisWZcId*t4xE%K4lET2#T9Q*1s0xYrNm2RXxBYhF9BKmWA#c(pt<& zq-$NSy;prAA%8YrmNOp?PQ5coONvt(EibM=QjrVt+1ba|B0jg*C_djM-c{%E)E2=o zDM6gJuRbmpU>Vln9#9<0A=Z6ihFAH8_17FzX5o2eV@=lL5h(7>Xw z=|3X=nz~Ej`Gxx@5>K8beH@zUJHW@#Rq(nv5eFph-a9s^CoiMs3=)C8JyMA`0lhkV z9-|yWIWP3o`|4Y88zu8%&L71(XKY;ss9NjaRS*(&fh<-gUZQ$;s4~48f`7=lK`g%> zCR*^k3_+9QCNC{bwBXe436w!^=Ydrh(c&DBsB{fcqRpV43!uWHi^3|DbTVr- zMFBb2+!K3Qt-sr1tj1?fX)$Hgw?H6_?Y)U;UCQCI5KVu%(W$0-EUs-?>aWW%1uHP1 zi-(upy)nUTevl;ponT+*#KyKYx4!bX1Ns0$bw_a1cidOEw05!y2qu( zNjdrq>|Lii0PU2`y68G|{n?yn5%x)@I{&y#BQtvi)H9&9gk)p{Zsm*wg3M2cnqf(C z9f|5HAHx&9159aRnD@ad?ba#j@<46TV5C+RiU8GXD49C`bxD~Ba^>U0mp)_5SMQx8 zp~j#T@t~u07B>_))+%4Pv*#K-sS+Tv|LmGx68}(_eyzWy&TcMqdRn@M-Lg%Z;aMQn z|DDEQ(hzbjy1UuixZru434_bJCZ=H81+GK)tD7`yBXF~lB&Mi;m+esNq->R*(6Vy-U^9Fnwc|NbzH3(Be6sxK;{ zL5e>^EjFPfgoQ*_4;Ban5r)1*&cBDg2fetj7ITDAp!$H;zD?mTw0{sU^T~xHZhPP9 zGLt5AHvTLh&a&4&kO;#8IANoPS((sJ%U8F2Wajm?JSrV7lT>FLA0o%nPm?Bp)Q=o; zcs2S{ge^=AB#YdsM66$~h`MAe_l)gHW3dSUe#ip3Xq13qhhaPu1}ykwaPKxv)K-@- zB7`%kVi!5(CGyYZI96tin#;tN(2LiTcQiBggGal}%1)|NKuZ&?mLhol>yC^CkiuC$ znimd1#tW44`Bp!k?y>T$J24>O^WwkGagpV}zCNm((%Ax^082;IBym1(kUV3}B=b_c z8=EzN-c{-U(82Cq05El6@;y`QS>LOQ)pFXc^>nInYy0Q2`OJ76D2(b3FT>gSj1Xe8 zV{raUY4Q=C;_hkYX&qkGwxumktT(c^wJ0xqj(!<;jp^<19M25%*`wHwkL|yuo@TSv zwPi3HY~cX1w(`xHwE*JqEjuO|*YAmG=ugCETqTe>+_oWsrX9Id0LOvp5KwlynWPMW z{As{hNfFLE<}>TF)n#&({djkccgigstq)p1YU?z+UnH6`DRxwi^kGr^L;h1jK#k{W z*nRbiWtDKjxS^cxRT$uxt=5ZO#ryjAYYv>~@R~>YDEEX7WpyhPUd)gcH4C}Z@F9Eg zi-vEP^VH+zP)Kw7CuukdD1`WL(UXoq)E7UyUbpw#$n%eP<5R=kPgh*&-H-Jre{RXo ziTe_AHIS!x_WaulFK9HOrHACu!UcZ04}DuqK-4I)?>;Fr4Q=ejayDMi5|1u5o|?R| zT8)}(yGB=z{uI^Oww-Q9xY&~5oTLJtz&2W)?M**?ZrhM);4lq(VpJWt)pj|BO&%e) zq1*Lbml)(Tiv%MPLL6uvjaREQ5HeWWJOfaQ%3Dt!wtPIp0q7W+WzJR&E{3twqs1*>#J4QxAm<<<2PwhqWNQu1E-W+RWTp(e(`gD zwNh#Nb#sv6@-=U(X^Kd(^rL4N;Q6+o`E%3K^j$~3oQz4*vuk*WOk>z z3ykNH&<(0jWOl^Nc?EZ)*^gJ&-d3sA@B433Gr6q@702o1N0k6ZzIxYM2>c~CR8y~R 
z(IK~JHmBc^At|KHQ|O{`aKi66YN@1^7BYcta%t|`JI}A(Ntl{LGIaizagQTOJk{@> z72S&1D|T>u&-D*3HZ}n5^J}-rb9VfVzcA;t-v~`>PSo=H#ThluQf>F8w$wbJittHt za^`#~WNY9)bBTK9qeA2FrHjkNy{+Zyh5U1YChILv3|!Q?OHsv{QFco+%LJK z=mRWUQSC@95dn>)2Kb^{^ULWA;ivI4pKf@o%HBq^KZ3K7tD%;mG z?bgPjT;9|kL)3`Gq`x<0VfnLc&cp-Nzt^f1i8G|;@P<&=(Rq;dSYses3v6`0PgHmOw%?2l^x0ETBInPH-?xpks)n$jrc)Cw_*Pvu&_E z%cJ3oonmy8U#f+K_l!uY2ZTr-#}yOgaBIBA+fey9%ZFLjXvJd#+G^|W0~HbGx*J%M zB&t1?=E=JJ_$1x6o11_Ssw&^{OZQ=&yysF@Qd-N)-YoT(KVPRn2PAG1wBZe9gKU_@ zD3um0ZSfFui5Rs-XftmfgDW_uVKh_iDD;V4D&MrBe^eg6#TKyM`k)E&n%jI@F3AT`X&_l{v4 zApu&af@jzV!ay|mi8?wjA8Gy4r2xA~i|s7KC#9s8Fw&eiV;K{l>UPdXjpC8h-0B_v zqQE{9vQL;KyCh>8a~h;EkA#73HYgouG4k=OrMmxp2Ya0Z&{907?rY{jO2UC{e52(Y zs}iIG-%W;eSkFJfJQEI@V;rQThj|Elj~5s^yvCZ8OhfiH>;bZ6>Wo$M6q50pwc}LD|~ikkJklpBS!^JOnGs zJ5x!c)@dG(RTH{yhxCul{hCB<^04p`HhBGZap=57i6|lmL%Q)cX8>;*pZNTx>un(~ z(7oOo&?`Skw`s*!M=kfFw7Ako2OOH@FWQrwW8BouRHN+k?l>>Rj<*WFv?Z1DuB5-? zGpl$(miZx1M_#z9+B~iYb}k+i@HShcyes(!Vt<_s7N)3{@mv+E{BAI{w2dHqm z9i{?v*Vt_iQ}Kt+^BrGFGkw?N>C)9>K6<4qI-S?n-c&(r6r@FLLrZB#Z*sMNRQ)Y;G_0dYh ze%drc+!SqcN2bIozZbSq4?_^{)#rL0FKolZ2nZ&&I174pRW1hh?xR( z!>`{~a(1$ULwd5F^B`F}?=)WBSEr!XYD*QwrXw*jGz+(2G~ZO`e*QDG*myo&k9_5qya~G;M&TnyZ?;9#oS8G|>QD&BcmAd5O1jeOiso#=C@H0~!}1Vt>v)s7 z&)pkp0}!-9MJ>S9IZamEkI5d!TRhrHFImHdUK@M7rK4N5QIZaAj6dvqt8=Kz1_~yk zYAQAv+gko+4X|=rIO=;7^3>JL5YN{uKU6zhMF$BXuqVVo+mXh?1|OV; z>~V;Zi*8NeC(2SpPp1fcWfpWy-1qSb09Dcq1Xw18u~WN96lk^iCo9O5TV0Ws4A>peXM`;rT?aLC@=;G`$}gHV1FzRmZRTm7 zE4E%bKP9g77Bd~b`_6(#ABrwwnK9VEI>n%AD@_+cWwc}RB{F>CeZtW?k2$}Yv3W3j z*HFd<-R;DK&%u}IBky{mPH>TRjq~yMFZw>eqg>WNxP()$6AyS9FBA@aPtZ7mBb^(A zkIXc9BYyjkB0`g;ts8?`=+Z&|QDBmW+yEOgtr>TA_0}iO{Li1Pc~xkEV?FUWGhvVV z_rE6=xl5@c8q9Pw-USU0U4Ifc_oqIFGMA0E35V~FBvmDtPn9XuT->IBJ0@G{b9g_f z=>X*BHdA}QB8cT0$2o)4$W1T{TDOYkFF#a-Li4SszL$2}b>d^0=#7e@j!z~J+Q}N# zb8xb9Ku-*AtXM9s`T@TnY2hh^jiB7!kYSOghNA&^a)*NWQbtG)XlB_0u*5t#kIP$< z^2Y{cLHP%;j9MhNZ!t~f%^m=mM(RK!kCZa{|v^QRz+G7IHK2fM{D9ToQnPtVj= z{=H=Ki%!hTvOc#(Zb>5yTJBbVYDkjw)CBxULlxWksv(1~AGT zcgkaWcwH}k>QytkL2!>TkVG_QBll_sqi4e6`-52_6Q7WgU*#| zi58f{_Uu-Vl@cZ1&w>HZVwD>qkz|_#=E9z$33p;Q&gjOmwtEbxn;qBM%7M` zL?%`rr35_?p>;6V>GFpdqTR1VvG24MgOCR{FpsceA!m6O{*>3F2yG6>8GrdGQiH#t zoSwFa4vZxn2O5PW_p7Yxu;)HDIXamHL!$H1;j$64#6?B}bd<8MqQ>i5x0chQL|_Ze zFeqGhY`CWp8zD#2ka2h=foZ^9@d?+jfZhRAVLNm_fr<{m&f& zT0y;|ZH(ig!mqoiW=2bv@wzaPi z(BXk)FMiq08M7L;4(O|X;i2;w@q0C>|45PJ2f*C=8IXhrKkwR{(H{{Z!O||h?SqL9 z2-ky?YAKhL0wIw0U@!0N;S-1H@W?iMxTbxD%~qC~uXKdWNK zU(*}B0}XS;DxB8IlW^LzMsyX8Uh(>2D_JD{6Iy_GEe@=L)0x9j4ST3*6{*XZXb|W~ zbBxIeSR9*)ZQd$t!bMIJsXka^Px{A$B^bw!(rI}2Jf@TCRSz%Sf83wwFl+SmYVy5Q zGhu8C2nh2G=k-xy2d&gMU6+nU%7$A6slyv|H!UMojVX{SnSG->1^Y~3+35(RsZSe5 zIt1YJ=2`)s4z)5-^py-9<4?dmlSv%mE_2al1BG|sPb>4C-f4=W# zty8`OYgc_1*gvbK)7>Q3zfReHJwQ>n-;;(5xE84@ z|2+eFY+P<`QD(!5umb-{ssy_I!>uBvg* z)SGV+DRpwZ-Qma0!MSwX#>MN};lBzGdHcBjkOME>?B}y4GJV*@-Oi6i`XsXYsla-t zmI=c|oJ!c|{X34J!ydmr;*UnaC>F#wKw<+)ehp`<`Xl1|)`9yEf&6tN5TCJOq)CVE zp3P=^nRTeVwy8G1U^|1&9ZaD~9KtGKP?Ltgy#NPn=iSomfV=oQdpDO`@!dckS!5ykY#5b`EqE#aJioVfrRb)4o%DUc9sgt|KMp}>7@1NG6u z1zR=e7{UkQgn{%Z$nX(bb3H@`@g=Chvc3CB4Gf>Ou`nXQW|WU2#b}a#%3RRd+&wAs z;24FJfPE!&7o_Tg92`KYM2)P$E@Pb!C)H27g1-tlf1 z8;IaFO4919`F8VJ>yRlu_m;U;P3(|$74R5(_nG-rQPrm-3kzU%X0WuOgr(1Yx(`A3Vz7hf3V#L{Khgq-fz`;L zCgkqj?Pm!M=IYN3V`WmxiYzTu$YtwFCS4nTkv)c0kZOCZ(Ga!xA1)h*b# z_Gq|uhM6_u)exzj<%`}hUvU8W!$j#CFO?vt8mI!+DX~yI0Leb=6WiU?Uds&ftV6tF zigSxBC^+K4U_WcTcT3tP`ZLJshT%>oW@MKvlykAbL&VLoj5v0Do~xkDOak?rUFr^M`$grRNlC& z=S#7CD&|Yq7EhI8_@2fdDT7YXtQ5uONtg0LtSUuGwpqt zB$Kmp(>2<$NGqm<qor z2{lt^0Ckmks&oNjgH|fkVSz^R#sn@?sK-h6JEq$uL>{5&a*;|M*grK@z3td3O|!i( 
zN57-KG^-%^FlDB&8xl_s6e>-hY-j@VSd}Su-ot`L=pR4>cfBv#+2-@+F{MPw@2?-3 zqhJHJZvaN^#0e5)Sqd~VRsqJjyWp9$Du7HKvE|pfVI08WB5lQ!-9G(NFa&{>Wv=_e z130x;RrdzlaAd=!^Qjg8l?@3sx4geBL$$aURr}gIzAlubyvnT_B?zdt6lE3@K@I0)NHCEw$wY zUt7s)#hAVbWa>EWShK0b8Ms%Ek?wb_kF@sEiFF$pWj@nsaMHG^snjZ@VY&RFUMiDn ztdxq8Rjxo@?zC+_bgTCM%&W;{dFfOY8;l8L`5QQdlL`~h=et}M3C5C3ts)ALrEzD$ z18`Ecahj4UUX*6*0iAkwp=U%ja4nW>C-U|`86}>;c041hsY|D2PduYZEPuRU4tV#9 zmpO5jE5-{pfpNLb{&wO$DuUHdT&g2}6 z0iWCmN@kl4V4*uJt;r%{n_Hvgw%tL$;YyT5p$eADOzayv5qq#Dn*Apyg-JaB3#aRQ z(`)r{_AI`B7Rh_7#;a6;Z1z=Y?r+_wak=<^y_zVe1lC_R{8p;tNs!L?+LHea>t{c2 zQ0?aM%Ab!}z&pEOcP+rKUO{`=(TZ)1+HzUX z${dj>NJC)F_ac5_|0noyr?n#r)CLB3sB^_#KWC+%Dqny7!W*_|puN;q8CpEfMSq{J zi7(m~d9uAVw!9D4L>wD8YcmilL<+QunJH`;NZVpN{~|`%`x2JR@%C~CR5)plj!(7q zlPRQggtr*a7Z2?3|d*ODT&q^nw?8jGjse=>}`LfxfJp*DD%s*9&HxCg?z6g?cA z$%tjU6wNuD!I<~XvJ~ARXeuA9CmoE5Z*L~g{HOr5Zeut}69cp`l-lmrZC)Q>62Ci( zr!vy|gDqO75RCPo?WAT+t5y7E{1kWkwXT@*I`_8VGcI8p-ypgHW}nNG3C+ha;??2hzlg9&VM zUd;bCdSd7nPKo{}a_ZV-@OnPd-oNXCDF$l}{g;7&gQO*U}*J{;$@4-t*e7vl! zl2}kGnQ=La06Ua$48bZm3J{@{qi&8HhPdpRQ-kJAq~YW_TZG~*FoHZQxNV+ldkCpm z1cCY7cKeqzxlhx)IFK`m`UoKc_VS~EWg-f5UzC`BCdfr_I>lk14eCa|TgS6w@QYV5 zF?>x75)$^i=1WpF?6 =YGIqIW~2=D-JLJehIC59oaKTmSUHIytH$?r+;85&#k0D zMT!)%u!hV3le2Q_Uz0QKN`BV9RGI7_TY0r|EG((*f%?Q4_KD@oiDeA`_5SH1 zY#vVU+{D!4>BTH#QWt0|aAhx9E;Z1)*)ng=X|8HOQrqO(FvT9K{X%VOu2qkO^YrjR z!V_l8d1{#blr)0tK@@q@;tqEf7R-G)_}ev$1tbeW=XcU%f;V9xtG)Ah%@RuI0;s=T z3b9y6W&g?PIJJ2DXN`!|M0IQQd161U<*Brpn!1yMWYV%{A?tJjs|kp7RI?U>FP7)8 zx5_G$G!wa!Uv9Y^?9xmr=O1!(moFCSxj}MS<``v;5Nl-f8~1$Sn(=b&hkSXKXl1d| z^4eF7>aw}Y|3pr`lFG!t-BZP4`e>obTe`5qGK5AYL`CAHTunoUU2-`~s6oTt2dhGP z{&NkDYOWZ}p(4y&>IAcJ8vY7XF2}yF&y<%S8JCr6F(}lcGB~*64_%Hw)Zs2s6%fo+ zd8`ft5)Mc_8Y6i&n|r385u2w>;VR32!eh`~k|m`QQJkve;=zIq zpIcVC!~lKp$%?gNTR1aC+?5G8Us^EsYO1OdvgfvZf@(Jny#2d1xFWlpXDF7rps*AL z_@Z6N=hR@>s&9-nt3J#KhP7?Z3Xfh@jYF(rq)k&^(nzHAx|uy973NV=q;DNEEv1~d4#b%hAqb}3b3DU}i2Xw&~} zQC|E0SXIUavDs`@2g*QO%S+*4B9YAWQMiK8P5$6b1%9AKKvW#qjaR4CJBSTRuJD%SKjAS5DFte*KuM%6y-qCWL#r*T{-P1P zV|31Ww=h%YmXnBQH&qe#%uV`t4dk*R&BDTROe#9~;PHK6$sVgPNz{>6kmEkXvLX2Z z?;}ZTk>*CM{+Ix>#C7np|8O+zy^J&%>;I9N^T^_0;VUoSYO^XhI3`(c^{k z845aSEfJ8CNx~rWTQDSR7GYhSh}9sYHKF$S-%4TK4!y9PAU`*{M8-8k9P(PlfRPpI zcix}=o%h?Q7=ykAE)?YK7?wlD)+<1oj)w~8w@Mba5Zbugq_CT1@kf{jV}ziWIt>AW=6gIi$VO7m|roRk}><-ajv%?asD zR{*F1J;hq-uvkq1o&e;Zw@!UuYCcp31(xGeAC-jr9a!{l}E zeAd(i{zkk z{e6@aEOuvw(kpgnBv<o5KP^a+!Yr+&m7NtfS1uCB!^V|SgCr`Z^j%3hc{Dk2M8 zASX6hc?>`IoI3M&FOL|H()MKEfbJ{!G4X-#b@z^O|iKVR0jIx6pQqZHIu zl=7$~4k9jIgR&-HK!e<>;0FVAvR66Xd2kZp_haA9b|SNi8jOoXLaapQ?MN&3{Xcrv zU9}o6OIkYJC>G@ERW%Cg#AvZ`>*Oi1@!ti3F`MM^ffCaLX`tdTbMLt$RUw&NRZSXE z$iL}^IyV^iyMCe18;3y}#(m3gcTjb0k?*Cx1P>mp&F zk(d?t8%-k{ZzUT?15kk(v?SZOHQa(@Y6Jl2Im}%o{DpuvieM`8aqH{&Ps3DIdXaPNhakl+(QWd)lLQJA9Tu=pK zM$o#e2Yajk?_Ly*MAAGe*#_VS(o>Lmx-?y_Jjb;#{()7Q;J{@ROPjQO8_?S`6_(YE zN0ZO<_tj21hNn~@F?oj|#^X)s9;-P=T`vmn;h>?^eny;3r?b#u)d5yC0D0RqBavGK z#deWzMx(XI6m*4`@(!fBezn4O@ycjB%StkOC*9Od$3g+asskg>7gmBDCrsFbcsBx+ z2(%_>0zv%*)7Vs;wKLI+(Y4UneZe8IZX`0UEVO2nC69>bNnHSH9sFY42_mqc15W`y zB64j%F^gfZT*@TI(BdB4vp0Gt(Q;c5!Ly+?Xcb`xo=Ki`mR&Ra?P{TJ;P4$N+|Pu! zAd&}LlVQoNbE}8LxVO6Tv`K>1;B};OYVF#Xda)toB+c6?x^U2< zkgAY+@1ds~`5;~YB@qRJ*Zr^aJWdO;cKgbv8RErPA5PcBC!1s;=LIP$k; zq1ECRy)b&7%f-hFhM*yOdsNGJq&9_|oE-W9ycUP~_}apR)h^8JMh3a5;CW5W^-J)oPq7b+Q)nAYV>pVw!j`0edaw|o-h9%zwz?&_JutCO}i-Ugmp!=C&r0c zfc4($_uht)d4>L76(eIH+iJs!^UP-G#(v%};_Y~N{(l(cDZgK6>%R@}{||)xn;O5K zX74}t0ni8fXN&`~6P-jtO{MnymB2R}^klaR?D}QKooO9;0rN4RZmW9)Gw)=z*MBzg#Cdd8fEp7CI0JnKJ% zMWU_uTK8=3Qri!ZYfY! 
z3o(`}z^W$b1=Uo@eKxvX;OU_WxdVVt&X}%`>;bg$Ytv1nHns;aqLrm*`f_$7Xi_YB zAe?cnMie_>JW(Tz6JmP;kLV*2n-P%33LD4TB{9Q_8Y+>RsMHkDVpmWg23uf??Vt-b z7b&UP%jMKG@2pV|J(eqExRtUR+cH<4Qz*KXp$nfSRV8sQ8!KZ?8ug)0iPlTNs030o zHm zRY08*t#ENB?J9aF+MpU58tpu9ZRodL{T^!{l4)V0oQ7_tImaf71tkJuSO5yr(JYpZEsK&K-QkU_g|YA^|9fD6Z4TUUQY zRJeAwojNRBf%&)TQNC?~0E6rMd-aX<|2k{p>xsFB5CK!d2F>eYP`bKDzx8t$tsQgD z4V*w0N231q-sUmOHsM?y!`o#R9AK0HUkUPhVx#JJ;lyrMOg}?2sH~ZeHca9H}qW489gMHXvTVh9#ic|tXq7r~2;P$J&3LskBqkcRV%Lq7TT}nm_;>0Y9K33z{11us6ZhXw_N5B!oHV(+iEE zz#1lJE=h*uM{!mZ??kg2*>EL^^^^bl`-Xv*PhkbcrE~i9c>D!7M#L25_`s_RV0!;p zMxd1dU=tAy{86`e-(z73F7A&;6e{~Y-DL9-6SOrV0KtF&Fz1V5xDqv5($5MlOJt)W z6CIp1cSnv(%EyKp8PRl8aLq^*eNP6YBFPi@mn8@=E#L{h0lB>1lMdsxsiSjDa!@eO zyuCGQ+i`rlRx&qYYzu{@GtfKVPmjc=YXM=27A+VI<`_ze9*maOmkR^#Mw54brD9A3 z<|{U>R1<$l4`wJ>j)=fgvg=_AcSxHB!(^5DeEdW+AU~aycJMdwHz17xWW0k5Z>?ha z?=tSnr5YV(Bw*{4wxd&KgrW8BD7fIh$(@44$uPrAE}~j;2zxrBA(;IzLDongkvaj9 z*c!1^phk10Wi3qDP16HgU;HCmD}2s*)OVgrdlvU2L?eq>G>q^X4$Niq%lX;p#QEm+ zpxJS3&aH*JS1H-bvRyqh24{pFG05;jmC#uUKX6DNav8FN96k%q z;90%4?1P6l9k5q~Ierh|KZ^ltsM%Q|f?rv3rQRx8go;Z#6-<1H-b zP0g)Wyzf2`{P}=!K*qHHcG}x0!WPd>#A?C9jA6>E*GxPtgBF(OM5ei6THda1P&)F&UkIZYk!-dVnwy-q+ zi2Wng74YW;KdFiRX8l{XNSu(aT6VITXUPCvp!+0{s}pO{g9{nTAbxdkH5>+2VD`>l z`1wFV1|a~++=A!Kq`XA5Kqd5%$Qf@)EP`hM;M9F1`M5);)Ii-g0XQIKnN{3UH3kY= zZ-jlm4t2=;B&YFC{MvC0nWW)IA_0R|V2SYZHjA+(>aDl);<<31gD4JEsBG6WQ3il~ zb~_iWAhADPBD}+9n`rF^pN3@ze4BY;tL$C zlV*U;`4|`jk_bY~qoZ}pU+XjoOsvNV6KHtLi@NRNX7NtqfnedqbgIXwQtfcnr4qM; z*pWNf)K{!6P(uJnym^?QIBJW2z<{Q!N@&$c;-&b(bivy{VTD+P3&6nA&>$S?sS zzduBe4xPkxl==Kv5a2#Io%eTPmgo_GJvZ8WUV>-7=%G0#)3c!A;5d2!)K`mO_-J`U z9FkUH#z{eMKDbdgw7<6&*c+0GEr3#Qiz*_cPh%cHFs-n}?{T>^?oF1t-eR{yCdWcj z05;@F2BwNSCbj*ynW?h*QiPgQ)9}HcWeLtPZ`6U^u20qqcdZMKBf`nJ zuFZR?)6Lwv>>gqox!>_C(#PE(3~&vtov?%X1jxSWr{9zj+!d9_fcjHt1n0dEwRZ{G z$|(4jyRaR|eJ79QGwkFG=~S&+k$4W>1`7MIo3wMFOZH+$Jqw62ta#C<)yYfy0J{q? z!Z(G&OFOXyPtR3nv+okR&r;*)!Uq`h=GCQG4$|ImqhGvaAf(Nbhl6gE$4GLxL!8H| zb)6=62N#1by`L4cmr)CxSNCRdm#Tt|49pN<=^LpLdRO#3nIP0z1^J@(|CSk=T+GA? 
zzW8E=5vN2aWBP{7D&*o2(J|E=tB>J);RLywK~4~G#&WgJ9aEfFwxMF(sZYXU7B*MY zEhlsIpKeorg;yFJQ$n4EmG8BO!pkVb0G$fmP!aXASSy6L`T>!eqs+sSG1WLqm0JsI zl7@`;R~BM;zzyl2%>{U>q(nnYb8+C2XtqB-d|$TjRBS;GI{!AC!T(uMJL4K!`SZGP zKHfUFE}(_LR)k{1*~PF0JNE^$PAg}c3F{YIOTM~LtY%0!_l34}2}_pjOBv-m(}unC ze{|h*aAe)P2m08yCpJ2^Z6^~>Y}>YN+qUgwVkZ-O!ih1r-ycq$Q}^D#y1Ke*t<_bl zyY^oD^L(F<`1AV3y%h~>XXoV1+|5B=6?f1~QJ25gmS+TzdNM_5Q|?9v{$u#C=TBo{ zPQFLs&K`hv$LyH?MWyq%St#(qu5|?eb2py17SX0x0Ne52z9#na?nTj9JbBcX%_X8x zH9>8(YA?(<2`{my8B5W$aEl%Gq(Q=VQtJ3sR*q@=%F7x!7|EcPT%-6HX*Z=RQ%yV1 z6e_lH@O&oN4JfwKW#E;Nop;AT9><;*{}|pBGbW)3w;P^(JNy0FW2Q34;;*VJGIznx zlP!s+?*Y&X2+M#~EWy#D=0q&k)JGF1-s3Si@5y6PQO^pz*ar)A*7`z0!-gOYq>WFw znbFRpbM+N5}}zQ z;Qxvtg=h%$28Yx@fbuS;9?C_oiV4Z6w-eL%0U{6K6P=XXPhVQxKW;bbKE#h~7Al>* zSrEqE3E%4Jrh3;=@X_M0?e*K(p)U}@ZTaRJ3E~Jrvn_g5MS8ZHD(czY65ERdY&j;f zt*n39Mhdr<-c=A7?6}j@wKl^QYpb7Vxj-?DX^k{BWUHmBz3ykO%YGnpUgK1%OfWS- zM5fKDGShCrNkg3mmprpyMDsGm2&i`83^t~aL^?B$H9AOf^veVbI!geeI2oxtI3hhk zM%NH?m>el&BqL*}&PYKuw++9x&2#M*8ihChTDg<^Iij2-?9SDPwag;2BlRi9Blgt_CBF8H8`!2`8RyZc#Rl)N!e9GK@%Iljcz~~ z0IJ;w;-#BIzyd9LU^jG9s)n?SyRxuE{U?7nFc23u0`N)muTxo9f+Kg0@@a%MvohsG zCimQ*OD<__crl}PhYhd^pBTG+lts=)4x?~FT@*B|OJX`@aM8||VDW&5q9%wde&UYB zA(skdE-8(NxZ=o&A;?LM!*re&iKuk&_y8IHMWMioK^oYw7C2)*bUsDOOtl9K5Whh1 zGBW%|I8IlnVPv|nk;x()(5itap}P&x_-$SHOCIc67H3}ZJTJmTH7xlF5g3zfE>GN_oiiZH)s z?XqNmn$Gf*xTi;znF%U26PFU-XP6a{tc~%*5&A+B}k8@Vpuu+-kIr%g8zLu*hDfu3OduG6@^I6(7)sTh|}GF zLIe>y3mKZ_Tu&a!1UM=*OF~&*+v#xo+toPf@^AOUqqmgnf;B^aj+CgYh^10p=BC|r z$8WWHRcb$LDzc~#gasq4ZImb>uJMh02u+0+$AoEfR;jTxzQa}E2Wn&DR;{`&$E)M^ zJpD*6i93jO9KrmFIFmM{i(a@A>jx~EuMnrynr#lQI@c;w6@OouDQgTJN?27#PlJxX z&*@Q=&|K=#49j6m`brDVZZZ-5duo29I>8>At61FwqMAPxWc8;%gnHE+RNsm_`dBf1 z4gBQS{eAGZoG!af5XNN>Ua-MVVRGjnLQhCqDq;kl5e%dF`(572RY@9R4@)b%$-8(3 zYiBr1#t0OJJMZLRS?gm2mxh>nS$NNcldVRn5`_13l^s5gC2N#cdth?vGk1Jn( zDSop_3(i+hKiWAejbGB;5&I8FKEhp*1se{#Vk$Pr95^HD%?^#+k=vM~VOBZfHy%&z zNmv((IHIQpCo((Ki0TyVxts?){43j6C}-q{^nPaN^D3nC+{;2==47k=-V1jG`-8O* zw3D>?cW++vDzVAKlzHn0`|kVBZ4rL4Kp0iFBa_CRq5S^WPb-eFPH5K(fWsxFg7JtV zQBX#)7RwXYgR_ll7&qail~FcUB(b44SSPAPAQtB*Mn1A1+ES>aUYMx#5tVk&u^hbf z_HVaW+d($C@!sl{Ry%jbC~j-l;8v@e3ceiYNCrWc~&=S&j z(^ia28;^nlY;w(ZEy3wV{b{*L>OqXYc=7h9`GfIkIwB)a0R?AiWLsavWHUmB-e@1t z-L9AS?C9<$rGGZeSP2DiqfA~f40uRS>xbX#WUw>rlyjIhDmz7_g!orUT#6&f6Y-az zji$R=q4^SNl&4%*Cn#oZZz18-BQPqjo{aBKQeVTu>Lj`^*_|}NBvRW``VACwdTI@k zi02}C065Y_Aw-#E4g0rteEppoy8p({4gTfgA)tv?A*Oy};elyrYz?yUDStIG zqVD@A5N&@4Rj6Xi))>YSuBYrrlG(%1<$_N(Ru$qPyJu23_>7~M`8f)GBfFcF^})XX z^bDGeBmR_vo5Xl3CYGP`Ofg|QYCUb}KdLw~Qz9|U)+n;-MB zeL=tfU|?sWnQVHQ9qZdjZS%<(F0#WGO&h==lGmLW>qz^ka zM|nQ1)i9t79$M&eZZtHc1Gyfuj`WOKK1d4tiCIQs6LnQ!@OS(0gzPW& zvU^7+tBcKDL(fb+NeoGfZJ<)dsCHD_tglC3VNRc-9O}CwkrJP+>qi3XK-3|P==y<^ zwYRuE5>`<0toU6vv(piP-#_0^*%ze=y%y`qB*Sy!E-4AcAb}KGTweWf1yZ z057ipo>;oi>dA7LusWRGWfApZ3jebt-OHxeXQ)Pg7$Bte7Z8q#?FCARs8)>Iys_A~ z0wQ8Bf3-(rXme%q=sHB7O(UZCQzct!s=V}&L{Q9#4Qe$3EEfK_s8l>SY1UyTrLOxF z)ttOA1~mjlAKXBo>P8yZBEX~MR-)}DJ#TWvzM`KvG1+SV;wzl}P$hFN zD^^XPB!H;-;!I8y1xhH3$n0{UU>Wa}!h`^H5sU-BM6OjBH=+UWaeP2pyrMa6u9F5n z4i_e-;t3jrKPc#@fcdUcjYZa&EU^aAV~NUB!W;l^6@~zszo<68T48$gVyRM4p(!3i zwk5c9hxT)OGo*;N7RAL)#1Q`A4D)%TMUipYkVPl6%SqGLK6jyv_{hxK(uxriZOcK= z*%mRxJGgS2nBY`DJ~m842oUk(FnbWa^#~%sQA7qh!tG>)a&d;uLL=xQW(rW>PHku4 z`9bn=eaABX5{b0vz%Ud{dRLG|EEcB65*|m)ImAMn197ZDm53!m1k@H>s+(-m#F*BZ z)F&*bHH5-WR;A)$!H=sLI&W8D!4IpD;CqH(MYDX$@Wl=u2q0p0b?Gh{7^>yA4~iJd zB09U}QT`=1vqk5lXur1BZrjis57lmaW#3ov4(+o1;SzSf&6gimc;|fz31uZ0EmfK_ zV3TmRM;Zs^n+Nwjs|k~r))2LBVY7j_I#+!JrV0QeKIn`yXlw#fDs@R}KMZ>PTjGU{ zV4$KaV{30Iqef56n>x7_wjc?A&y`PW-F8?8!CyAzuaA 
zlBtAvqUIcHi!ntm#7{SVNzB>w9s$T29_s`#t`oH`U{Z-S$gVZFPBkNNdjCeWtf7wIJg4%c;x!=o>=uDQ{XCqz+E(|H~d3@W1b^=NP&ws`7XDpJI& z{YiSsplsMpH|HWxYzY4>bb7s2Pg|GXd(V2^w2uK+1&12@M=@qG7XJF7D{`e(?69o; zXxB6T$OJgYJ)C)V4;U-1s}}n!Cb6jS;pGD7>I9E|IEQp5mwW1X8J=VSBJ38~692*E z=8Bu|H~z@>48wb#nDEPX$9(1~*|(9GaaR_NcS2UsxcbpNamFUPh=yzr(4Q{y1@r7T zZy5Qgo~s(RYe`j7$E2DH$76)PcX3n#v?^=hqg!L!ddj*8m7=>3jn3M1il9>pzpDW32@JsXJT^!eT&qSmbx73upm{LzuyuG zTCKWpnxEKkjZLC1X{C)*$g21KBU?MdgP$ax1}Ylrd|S0--4?lO26K_9P4zA!nUbt0 znm?FQeudU1F^s{Bwn#dnw`QYS`7JWbI5J%O22paqpy2yZYmJ9k7t`ZV1b4V&1306I zJ3EvVu-!el#y z$IWQ231-TbOtej7&MC_6?;&f^S4^27csnl^Ut|=nJdQTl78v+`G&Gi-bS#_uA;%>9 zH9Mi2cQ_gntZC;F_kfq4=5oK^!NiQLe6j~8U$iz|Za85}ePlxWT(S^%vVBHab;z$b zXsvxyFcH`_Wk=phX_vLes#jBSX zU253fD%rCbp6YaBi#a)nr)Ft{>b!+4zJG!X(hi$j9lTgueex0W&GGZAy9J@69|Dlf z9DrV~>T*Gv!cM9M_Lc&jTobys$4Fo^Lor{|qLgtN-|E_~?l;88-kT62 zRh1v@Uqy>va=CO|(9qww^ctWYD9jOWe`rt9-q%r2Q+%O+nL}!6u41@aiG8>7Gv;aLYIzL9@EL}9ky*H>x zBVh5vAt#f@gcG82UNy5w!J11MjFvZZ7)&5cYUsXE5?dMcMj1-nvW-Vrr;9w>IUqW~zsIuYsoc zy7~#EC=>&RgK~pZp5SL1%@VeOMTcsdo31Q4GSP$)-`% z!@%?El(-gdC+_r9A&-IHXhwEb5LPhHRlXVdnr6pvLA!xQKBb8u2bPlYlGu%TxVaY@ z-?nH)XO_kFiS)n+GvoDl!flRZl245GtOsY%2{R{&Lduc|!C1?wD-fya_siuGvKAsu zF-Gi3Ke_>0t**i=XQZlxvLMB`!me1h!z^KMrz@h z5ybd!FR^=sn7Bdpxt5+msLvI6I!a$yD>#93ATFNWNMsYv+ZpJOYoZMNc2G)$`MNz; z9s}T`Bw2ZgN3Bf*XJYJ`!tzC&zGp?R&Y`+{STr#7R_26ld&@$8o)S!S%@FN~)q!5x zVAghyR$$hD^&cT@WmxIpYHG)4sjCh$qOJQe_tWTiZzEsN;pVRMM6?{Xz6Z1!kVvp2 zDF=V8oFbKmIAWy^f-gC3p+aTM(PqU~)Tren**PUwZLN}OD6eFVNr~C2k$gzSRno_o zmvukhoq3phgh#+lZWiAnq5t^F>}|`N`XHZQ-P!)@U$srPRKwJrXSa5-vVy%PFnqz1 z8XXE}2p!?*LDH(^9Y;Y+kXR}4kiUlsog){z#aW-k*|D;+AtBQNXf~EOJ5|{c=npa_ zF?eWYgCO%(Gb=5Avc(}1TrgjHZS%kHFZXxq@Yu__YzPRDWCvTtG|gcP^8?iBs0g5U zj!I_`v}3#o{hoTKoA`>2lzTkrb%cG|Fn(Iv#w^dawDp3q>?H;Na*7C`-?%;Z-1=kT zSk7_UxEbAagi5sBcyR1oU>WPqVV$*OHZH7j7Fd!>?=G?kP3z3Z+NTo!$)jHGB)OR3 z2pLbV&zO(if4E+q_pU;5J{=86?cbP7iAzn<|ZJ9JDbDHZEH@Y3CH3e;{x=_t4u_wUqfKK8H2 zzbTwDEOm_FK4R}^ z#;d{i=bs_7gYj@%5vKn}M>E~GamM=bc0lB9T@I);YZG3S#>(Hh>C)e|95NDu(B~k` z?*QoHKivhW_jHfXGuM z=0xBqcJ7%u!V!c{OY%-E>ED;zbBaSLOoc5Ue;>Rs&Vhu)kT+hvhyH6xLw^+H;#|#L z8c!?Od80N!0>2Zj6-;OGXPcYo^eDHGn5T1AF=XLepH-361W?PcMgo+5ff37-MxA^g{JdD=&XA8K4 zK@P0z2Gh%fnet|=myDy!8=*Awkm|THNl*3!mUFHI`1u?UHh=De)DI??hf^sKTyql~ zvU}YBxgCK+$ z9ZV|^X5co=Juz;wcQ}zgh~eCvwjm$0{Acn|rp?e1PXL+QkN&|#s$jCY+7(Za%DXZ= zKF<%2`|7UYZG3VXWb(>Uqy;+>{Pa<+^MBK&eS`UP2wqU|U+}-pUXYbS)BfJR{1X<$ zNA2?;#N^o#9%Q`Tqx)^=PWBUj-y%iDA`%Ho-Ozgc$8A3cOQYX8)UK#~{59PBddTVQ z<=P)6zSVyFAH8qa-S+x@eh(+Jm+AFw9KZaUJ0EveS1;{@X5T>uAWRA!QG8exl4>e+ zH=ZTm_>foo^`Mi&0wLRirQI68pnV>$JR8zuuE*|<)v;`B?Rv59WOn<1c|Q^K^+EID z7&X|!tqGIBx&?k45d4s(d*MaSX;=HcU#w`<)7&u8BTe<8&`=?FsT~dK(>@rXrcYD;G{uj#}$Y9pv13J2r|A_1CqTJ z)>RZ_{vfrg*~%1)R9RMn-$QHx>|b!KsP%b~%8?wD0V+7bkn5jrC_j(_8@I$I9HC!( zS2(?C-qf2w+^iy@L)k+=)W1iNQ`=-t)0!FPfHSRwlaHQ>?*~9R{bQV-#2z)!Y|La^ zh9`Z1^dM;x&`ftyCyF0kT7>C3S!x97N~_roCa3|zZEf~%0orlN-Y6~TG2#WgVf6)h7!#9%snD$Y_8(hk^o;{&py6z$`j6v4H6z-c-3p}zO6Ed!5VMOVX3AI zOAh(J8ou7d)-hc0*Z08k=?yoK5<;mz@KyF4A0l|3JcKQ6t~_O>(W8~`t8D0c6qb~D zE4(bSmWwBCDTjjDM${)**!|3Z_LNy-oOI3$cq-T-!Ee{3h3}&OD1=@MseibBIVYJI zLccr;0=VvOT1*OuC+gl_D5M40Myb~(H#xj%>9(zNFdL|fR=`hBVUC|9WG(Q_V zOf$ok$zdC4!HyLd+Zz&Q4kZ|VMY2C|WDz4|z;0=Q&NUs4sC&t)VSy^4W^?3nfpQ>% z#Ca#aXy)0%Id|!Iy#+$t}K&5$bM< zvbJPqvIU<=fd(bR>$mAi3GxB({+*~mDnm83e%cUlce|)}ZoQ zQ-Du3Lrgwu#?umFrja-e44Sq9fGP=n(3M@Z$;>h-v&ttNp!#17+Pid{O28#NOB zcgc|U0L1me?mBuWrW)gDgpi3-Y7<6$?Xj8=`o5=V=98B)lt}@ANTg%JHqt0P^xZ2L z!LQb7*oL+wu>eCF|E{QsvaoGtDdFN^)X#B6b78MkIVj(6P3U}%nXg#LA?%~k2`b|% z85@Rw@D7{n8aU~;K2+(xqnZm{z!Obd;BD4d^#cE!Jb)LTu@ihX3|nx#AI%>!Vl?n| 
zLl|c>8Q~3j=YUX+UsZJ0eHp`coxedqX>qayL=Y77GO*~s>#wtO3W>a<#j~`CA(06y zj89s`=*n0}fK##`P~!gv^W$X+V}5hQF?P_q%&_+#s&Fsig1sMDNyT=vW~xB`e1y-6 zl`Ya(%{xX&PShK4)dfiwJ}gePf%Mqbh)p>8X61!fhWEuP`}z8$1O8*0Zau_#+)sHu zC~AfPu_hJLf}{lI4W><(z6MDqPQLnye447!%;lbMT``P_1r78XjHG=(3te(8leLaF z3_|igFb0pNQM#odZ9QphPcwnbo$hL@ZMcPqE z8g@BGqMxDoXL8V)75HB$o->Y?*C9Mc-|a#cPc5}uokVC6N%cIIi?@fQj7yG~Q&cWr zLy{%@e7sLBy!@;Jm6ku^qtf;XBK~QZH20zeU_VOZc)DbxsxeFXzYh%*07CDxj86dfnAOB@LtB1sKa ztZyCV2r&rw5s8%NpF8m}Li+?!;3~MMMB|uP);<2S3LidoQccIBbXHUMW|KS4PFjJLpdXg zfFDt>6ooMH2zUgOzcr^wsbJvwI0_HqkNh)w%)Q56bPU8UarhTKqEiYa=npNejp1jU zST-Q((+_Es0QOTi#CfBYsi!hg>L%O{!%%YD>Y??C5f<>;Hy9}L2Kb=6AF8{E^z8b_ zkZ_88=PCxhU@rtAv`MmTnA?JDH0_ZgiXETi8ttBv81!E-r}S@zL6MYpi=c`;o5g!X zZU|gy?RlR%Y1M<`K&T@>*z)6$_|Sqx8QnTodH{dF3<*_!HL62_9QeGdsau^0?-8ZP zwR&#Wd5czln@<0&1!AzR#E{#MGwEG1Dw({R)!h#|kWxrUGaVwL+$%mHx;s+?f~K5? zte{Z~oAF^*6;NLDq#J0mt5O4A^o`WkVF~O^G%#})9$noK)SyCw8v+a+U3bD zruj8*7*7?K|2h?07LA$z#dAtCmE7nFkK`wj5T+TB_>!N#C8r{$%&pOLuhCrkU9V`Q zh1No=v%a%=xzN?<6jj#)jGy9NLS4TdM|r0=KEFNap`Bbm_SAAc^6Nh*R4GLN+SRB+ zyAPUjhD;uRZy`oY%cNeoM>Y!PU#w2JznpF3(HqADVOCaMTe z8~06*a4SG5DKE-%;GUp5h`5OU7aP^b&#noQZ0^ZN5oVNO8bNtgwAp4e-FOO=xr6dKzaE=PpLGor4!)X8y;CC!-TlyB0IBrsY1Uje-zP zb30K@!)`1%yDN>M7js4I^{d&Me!_1QdBPf0G%L-*KozIuezURy4O26puAnc-s@Nde z>@cbplQP$iMLwOc`NV%rY>)NW-|zM5cy(2l>hD7}<==;1KKZXx;49dJpmTWsaymqp z%*koM=*fmKvjEGmYn1fcH7Y0aLq>h7jI5 zP7qby_&JbKW)BThScn2?@WtPf6MFGu4=){j#y@EiA0|J7PpD?#q-a1Y@t9)=37f!Y zhgBn<*`%z3n_LU6E$I2Aky)pd)1g}OdQCvk`#J_X$@+~Vbp!N*TV6PM?_>S#3sQZLXpD8-C{$^W#^P{ zjf?;ue#o-40sEKYC0e(#^;U5{lxlrA9&3fZB7`ppRge5pUnF>1mxOHo`P;1<^XymO zQA@PXCr|JoI7>_tjy;E(!d_G+3-64tRyD==?JImbpFsXE(+s=#uStBVEF>PEq}-g+ zg)-GX#yJ$;23E_e=J-eVv_Eaq_SN9^()QLh$*DqVcYa;&C}8+|ym&7GQk3DLVj@$L z`Hb#i8=H~NnuwGQC$c%0e{K*Qu+EyB@aC;qaehkdhALlm7K5O&X6oug$`yh1^dR+1 zkQaiSi#rEgO&Ir&w?ggH;VXLK%BoMWY!-51DIe&{IA#N6pup!3!=w^A8e~nM^fl6c zlWF8-sTU6or88H6F(g!&FzymfvMUTPo`?kHe-}XetPov~;WoCxZnkm{zLG3<3`uW% z6La^dTi(0+v?QkO{Eo<~ifd%ZLk$wQU!M}~Bo%IWN4^_Mt~#zIM26c+uDY7+*Yzu)*~V+xJ% zri|WAqL3Y+rab^^T6X0F{Ke$;q`MxeF@I0ElS8~&q1AzwW6F}LMCfmMt(!EjGc>P! z26pLj4Hd9SP?Q-#h2_06MWp#MxJFUX@GVkE>vzjqdJh`3;OzUxbEiQLa5DeDnwF@o z7N}`o7AsGm5A{GP?nov_S#S87_c3HC@cqTCDSrr3orfdoEK7qZ1*?@+*=Ism12wIN zQq6xf?bJ2SwOd6lA6|R$CnHDF%{r6M(;eZHV&k#vxL=~R%PjKtK4yj+5ZC^$_&2Wg z1mapNn`V5X4ho>H)e}|vU$*wozih3Pz{*Gk{Sk%P1_3QAk`)_Omi)LlItS}XfsvDn z85k+Nna5#c-Wkl8_D^xv)k&HLp?Uz+K^w{eIEv4m?uzOT(YK9kyVtWIY64>}0!H%F z=?|o_ZVgv3>~R+fExs3pP`QInRjKgp3-)$kOw_2XyRN;IkA`N%qF%V5yHYcnA84*H zThC0L5QvU{CVIYPH7uGo#xF5UI-R;KsGWC9g1C_KpoYY#;V}Urei)|z$2$2;x=7PsY;rkjaADm}F;`5(AD{&0SOMNsuFEJeTqH?l9Lruv%OI|d86}omDr_TpWGk_;SF*Tb!%%zdmX_m1O^!OSI38+H4$9hV{9PFQ-E6Gq)F|7+%9K?wk_F z^^g%^BfE_fM0dHr;G+dAZh<;UDSncx!)AvYMCwD?DPGgO=2VM7*s)3U;>cb4-T);j zLEEDL=EyW`_l32yLwVT8lWS=i6q%LrJfC9kVHlyppVwealN^IX4yUmo8wzJ*A!|CG z0J#Uy7l<0-z_JnnpD#{%;`Ji>lk@Y2<5;EGzPjn@j?t`K-B3gCn zCNoM1n0gH&lEjZ42W+dYxLC4LV_f|Vf53>C2C{fdlU z3qNt)NAaHnhd}mz*tT+Ta&Ew?@OntLSsS2{blp~ZmMdepD4t`8_MwBnvvwVpZ{g!l z`6kLHQH$GwsK3jImUdI&BS2h=)P2gfFD;Z4qOr4sNjdfVh8F3SoOYacG$`4FXdheX z!LXyoiMk1uMNUT8jm>xIkp~Mq$wG6OtsS*>)sFQOp}@e-8AUPhJU3di$QC|ojI*NI zt`oZyje8G3E_)F_owtRW`uosPnb>o|=*!PFSpI9LaSBZ-m^EJRFm&xpD;7z)G})HPA8sZEIZ{F$gqxTo}g=6Dh-N6Ns=B!g_Q+@`Atk5>yTr zla{$5CZWUu+|Y2(rNGERpmkhu^nsMl5eG9#MMZ^=MaKO6i2Ek6m8y(rBwWJ&wv~r4 ze5V)wb(v+PZxf*xF?BpU-FEWYW4IS3xpL%i1+y-8n2W};lP2z4+n60k2x-$SysM`3 zZfE^b#Sn{s4072QT#Y3mT05016@?v4XQZ$$TP1^LZj_EE)lZ@j-t8Lmhw3O@LP$`_ zSc6`=O~h*LmH6B@_U#}Kws-_1N&c2{BV;fejtifs zJTWiyLR^Xb!36z^2{_gjEEC&aF&CUW!^bYP~H z+=<+Q*?K%ikj~7eu;KYN*L;+Lk-0F(<=llnDN4`z%|UCU++0w&p! 
zjTNh9#}phr*R0+%EmZ0;t|>&S#}A0Bwzg%4TeoEgaJ$8&VGP?$I$Sb<@N?)WK1%+) zk*cId&X$>8Y8n8mgx<*_0T%`dq^5GzM=!xjVa_4loE#$z3uBI$|fh}Um7@f@-fY7*IK zLpha;pSGcgkk>>1Ly5hb*CYQ$iIotc#ri%EvyP6@nhDF$?2kFt!kgLO=N%q%xT1?@n%h;qZS=K(o+Fa6|v?bOp* z6-3?2-@W_8E#ZH58vtT-y#k2zJlPI7S4|{VVd^A>v zIy_n?&nxyYCU7)G^DD)5h7Zx;e4G4@Vy~SE!S*w%vdkP;05esSHG3=xR#aP4{<{GC zl=Yo}mbUz%F0Mora3hFJsoPP1e#p7N6_+(ETBc$fRH#ZR#V)2N^uLYl%95BjZk^wH z4bUH`-}`E|pDBObSk(kPhShCDZ9FHbX>bsCg)h)xrjA#;HlwR^+)lZyB&=Z<79A1g z8AxEHj1fu=7f5s*w{vg?BmZg-<|mR%uft!mC5NQgvu9%Y{e?W3li%(b(4V|=q=hi? zG@~cVA$HK4!)ULVPnW$9=#3r2xBunyeE;Zf|M#>1F)8;OQXCTUCMo4-f8XY>_{DlH z7O%LSugg_RSA@U!b^DrpA%7dL_o3Z%(~s4*5D)HB?YFnE>^GVw^lT5)^!&e`7|`jg zSc?hRaw5MS7he1^V}g!3-ngMX=~b;far%(mQCWJKaeh8harZnQ%W-C{B>Vs<1T>K{ zHt0yceU_l&chswp$plebYV=9XR06T%A7f*+aX9$g5|A8|3%P2X2id5XlUpjaZ6 zbLrOeduNOta=fu}t=-CvZkpBJF{s6(qm9={bgjAz`@HmsfL$Fz5Q-{17YZ>*M+TKM z?63RljcSSZ(|+_wnlJV{K(XFE40nmE=OHWl*43O#Ngigx(EpL!pC~*BSda!%$#!Tj z&QY&ocgpHXt28|^@$`Y=7cSnuH?I%$!ghw9gbL2*hzzcXy#IJ%GB403;7)-y99>d+ z&jrt< zsCZz4NGF?aff|PnQcV5)Am+gx=dM4h;U(*L(ZKqu!7eQ&@q*BFweJYSAoW~4ujNOXDO z-)GDW3p^|XH9a|{BD|P_09}RSc@F3}12F1%nvwxd%3_`(f$6<1f!HR+V(#dV3|1qS zs$ujFWst*#$M;Gsyp$qcB`o)AZDOAkgrShJe(5yba4CId?;MNEyjuSL}DX)lEh8{1XU#%~@+uCsVU2gEC6xgb9uCntWU&!Gt*pA5OO zcZPvR*#Q*nzDePJjK|6r;)e;{LdW2#Ud-BQb~B9owT0)9E?yo54&Ek&z14+HX66^G;V5a}nB9AYB9 z{zE|j6dSzp68gn{r`{uyku9MHj#!Z~l;11Nat?$8#ulU71L?x16l*qPCi|`d4%5rt7;Mho z+?c$w2HR{?2E)%UYfDO#pB5MX0TZ!+^IbHGn_)R34U1WtW0+aBYc@|ZCozO24$01A z2dCbUml^ugn0SAM>^un_#4kxG{D_{r5=%vgw^b2+>O$fFrG@t{{;w8J_(uyvwB1UP~R06dy+|Pe$;fpk&7S7lOYGL%@^Z#n$oT0Ou|Eq9^Sm>BiXQ(Ir+qvnes6mI2DA=U2!^RUCt=PSL$(tyw$N zIsL6!Ke`!OPrp!Yc25=Czq9liZZk?f~&(# z`SS@)%{UDK8%Ozn-UxrF>y05wz;Z)bWzM8bB0WcA4r=;^gaOK7HaIo{-YA40bG!GJ zZt48cbx$>mI>U?Bny84vqSU6hszbo|vw;~-7-$iX^<0{ykF$`c@11*mh?SgGj3-Xc zwrl9vtwb-{&Y8$EK_-w>qiYs%72>!5idNG0`g6vh|6mu0V>b|{vu~25X#kbaSWVdv z#zjr<1$ zbb>=G?@$=CN;an5ylY*rm|${)>~vLy-;1n)#pvSD%Wq?Kn%k`U4W@#bJd&(h>InIx zBRC>nfB6yPu8r9}9Ti*n0_EN(C1+JaHK0+oJLQEWQ}T-Gz_5Q9$)9#y#>m(ontfxz z!_XkZlAfE3`dzFlD41JoJQQ`ficukt;TfGw=MKAvb4~L6_X&x!Yx4&lF4VYu` zUqCGE*&od5%H>bA$Um_WgRNbd`NzoL1~i$S!{yxd<}N8jzW%&J%xCHgxr`RN%Y*?m1lu{ z>Ipt|D=m&|$`cQ=DHNLI0RwIeJr47{#RcgQwNUN$$CN_z;7XW|x7qn4%Hej3dCrmQ&?!-Y zl^~q-qvd8ky@R^z_i zjFXquschMr56ZHWcpH|2eOVf3(|)MUp|flFk%6PYq*=~aExnD$%jmi=k6wNB*E=ZwD?0I%t>oa{!^>F3z05*hi% zq=0{jZ>LKh(c>rAyYFz@=(jLo;U@g0uZjl*xS#ZL6P7=s^PVyg< zAm3P=7wa}LNFl`K_&iTycC7BRaFXtExXeb6N)+bne^nCZLwwDxrxD&;b_s(3I$<-2 zKeF3}`OD1I zG8mb7Tgc^AHlSs3OxuM)jzqDtBg#^BM+5wbi00LRex5Rs5*+p2`>=R!%FI!&fTdwk zG|poA;#c|o#lwCFQ`>{dEW_7-T%6>AhX?HJdAF&M$KlkoJ5m%hAzM;`UR4g}n7zo( zm;W_O;JO=NmYN{o8qo(MRv7;a#zDD5*|)X4*Li_4Wj zwmGm0K)-t^$6_xRxIDfJ(J<$TBi;_QY~KevCQmzWTbq{3xHu$dFX=GVO(_^26jD|t zc#!v~rH!L5<(_(|b+GpMiiVo}R`!sT-Z@Q|Y|WiIFZ;Q&ymf*CQq z=9;O`GIDkIv1%+*-o&4SB9}#4K!Kfu(_r92|DDh-SvE$DA*-?8qY+=pmml)uW zRgCh7YBdiF_+%Kq0d%<7>@V)hjtC*=bg1_sjGwMJ&KdL-h?Z8siRF_Ueeu(7WL{vR zQK@oH@#S;^5wx)KH1J}6vEG{+KWu_Aq)qp)0S0$ySsR_5f|p)8y~~S(zG>5>q5@Bq z4x``Gsg&LEBayL*$`F>*%?7*_Lqm-wDv zQN}wxI|aH9v!M=mO2q1UsVf^y513wl;AehsbSYjMTrL(GlXUO&wHbOid9;4RCVKx> zd!ujf_i+E1&1tXS@6+1)`jkCe%b8u@_fEOqgA4-l|5SC3?Ue=VqK$1^9cRV1lTJFe zZM$PTEAH4%$F^o%qIq>Pi$O#y0Bf_{QP{j!ua3ekPr9wQ^2$3(}Bs;$;HhM8`wLX-m6)`z-~gK%jQ5BA)wvg z6m+&?-F(`RyqSgtD4IbeAEdn#cI1rYw!}cY?D&(FUqb$S|G;9yefdKB6#WFROedsZ*2h*KqGFcJwjnO^`JKVIRrg5TcX7Hk*!gxtMmeXli@oK3k8|d#qtj^ulw3AF&zVHfThiwrsU^ryttc-LRD3K1(d-$ zC5mq-83HiJhw1I#;PSySHViIWixX+8wk*hk`es0JGiGqA?s~9I9(mp`R8ytU>vsO1 z^329;+}n+VJ{WrwD(`*}2cT@7V9EA;gYRFLOY4=(SYLh*XYJ~UU)`}KyI+W3`Z!<5 zR_6`gRzTT{B}{d8FH9;DATm=7C_jHZ?JZ;L 
zxt_pe!CLRyei!h$8OzW&3L=(UV1nbroL;c&xo&c2Qx2w@ag1-vx&Qpz{}ilA@efXP z|ME zzY2iCDkh$iLXR4&KUasCU#W6( zJ?_b&rj%pAd9g^v>dU3ZbQVu(J8__puhy`*=;)*R^2L_Kd1+#3+>A)d( z=jnBuNhyo&X-26xv;F1vnAUIc~#XGx(YLav~Ln9O6=Xd}HoUXqGH3#I!*TL`ACDgTD^%&8l^@}nKi$qiBBy}Yvp z$)7zr@H91NuQBd9s|D{c<$~cAv6qJT&P%;|yqf>m@m`UTAou<%?e#D<l6_>e=GsJIzeV}NnvRqL5_Qt41oA(=CrXomXl5Vn+ZD+ z`s*Tmv+YEZKMJh3VYj37Is88E7#Y7z-_P~+8Z_l3k!p}&X|s^tMS{;u{4b!%ST3^4Y(Z>8s2v z#_sU6c>2ABYPBixGNn(F8?hOFk7r|YDg|weA_a&~=lX+~v*}FY zr!Ss}lWEjLVqyw$)@GZAOPBt6qt2(KT~9IH-TX?GCP*^muQMAL_&SYoT)>=$k5$m$ zPy;(8hgz0a$WtnYn0h9LtByNz`)2RFj;v*GV?VWDbshg2YX_c4v@*EnG_r`pgtA1@ ze5;LI;w!mR)&w{rUh+&2X|c?pY{2e;Zl;vJd^ILKmFylggY%vo2d4+~ZAbeOLb?*5 zs;g1X5yxnwWzpg-@fz?vHWxk7D{X~F*5c>aiSY)+X_Md8z^#&^lnAcajS)I$H+j;Z z<1RDs$wgTmd(KUdXLlNfy-x$+)p%r$tP_LMiwp}nEe5)o;{x)O|TjA24iKLd@cUAMY?JP>iBy};MZcP6X`W& zEh%*jfZ6eT-7cWn=_We<*kfo6FSi+{fN+y4+8j#X80m{V>ic&IC;dZO{b}T5jI(c3W zlO_l@FzF2fF6CDQkiSeB76CzGe2H{-Xid&+h*=if8y)bNaB4oM?FI;%&rUg+8( zSxuP>^0G_@deDoPGNwob7;XG7(KK0NxWOi2A3TtUS^qVH|+m+zFQ`~Q&xO;ju6 zOnZJ*F&{cL!dWDPqY~cap+CMQy`4%1!Guv9o<%)Au(0GQwhq1vuWH;142iQkJ8hkw4S;aTja^9+3RV!HT3{@^0}_d?F1YZ5UTt` z_4R+m>M0;O2{c$br!4;koxffd5orHk0)@CdQhgQERjHjS&UaX1c9v^2;chg9TB!JK z$~D#Z5;)Zi*BYsd>FSO%{y^PbY?NsE#wSDJ3K^4}#S-X6Dct~O*L``>Dagmgb520i z$?KLeyIQ$4fqbLIaag@by2vf(^!;kq>nQxgxFMo7Nq(oXi~^~(0nCfs2UHM29aYTw zUn;zQ?VO0pl9gkgtl(4vm5hp|4@kJ+4i?|izzaFiOguc10uzS^xStaIsmXwekx`0e z4PGPTdO+qD-d|x0XZ!#Q{#|H0Ba98SO=C0ctzeI#*?3vdbiMq&00M3FG4*APgq`h) z=ZLv~=EMTtFA2*R$Cx@(m<{X4MnZs}SzUg2y^cOanhJ1c%K2oH{cXO-Y&u z0*;ihR#s@hXMo@LQ~2DaLW{um&Bb6;?vP=R`GP{0me`4oLozd!u+q|aKan|163gA0 zr<6fnL2&_u)K75OFjSg!5sHNViA&Cowm|edoN5K=+99cprKsVmuvibHQQ047wZ)K> zl3Vsu(*5yA%1r`ZGJH@A@5Dew%ZtGKBQ@2Nt4rH@-Au6IrYb+rh21>$GQ$BRNJCd! zdz{Lsd^ynM(j7K4-xgX_B~>{U0)?g_V}1-JAm0p?LfPXUGsxYG`$$XhC(Q=A*#AtD zUjB*PU~=aB*V|G;Xb^{p(ToSJ1D(_%$955mX5ojqsBjzRvDTb|AY3~HzN<(OPR6xi zJ=1jnkBI9LVsN)E_e!|&x?a9#h?7V&G~)$gSnHrx6uhLLM$m+)&)k5%I- z@tldMj}!9s3!hi}?KgW4yRCyenkiz9ENxW8?>aD~DsfdU(CaSD6J6sVTct7s`7;vo zc}ONNh$6i_DC#`SEptnXI8O<16m?mU`LQDsJ30-TKI?)k_9yTD<)_FJ_1Z^ar;aSh2Yn-BM?_o8A4l&3bS&PoX`tk&A{#^orbx z%d8ygcTkQ{y&pMzjl5akP1WVo3ZDE1|2Y0Yb36!mGt!}#@sfi6ZsN>LBA3Tl!3!dy zk2a$vjGWp)ekuvah#PGP?4pAhQ=_wi{-|JxOb|a?bYm;X?NFhdogDk_tv$vls35yU z@eRSK(R7)$@1!K!N*juFh*+LB3}Q#09XupH0HtEDiyHN8*tmAkd4 zE5Vmr4P93G%^<`j_rRfq4if1LE|(TZ_I=%e)4G9BYJR-Y*hP&g2mdM1@O2GV-EV|`Zl%`>rh?Fye@wFQ0R!i6>u#m~yM5hZNM+m^6AlFtMrkf+Fy6=zbc z$UMmMlZ+lUD~vr$4DBbf!gup@1j6(QPaCF20%b;mJYm34pu#dQoQ{|U)9+x9m^CGD zb@LAapP6zbv(Py)BH;0F0p0E)ureb0tO?sK*Y1SPm*lkmr1t34vPOMKg`o$et0L99 zrrODA*@_Ef)tHYpRtTksyAI%gpL%i?X$s9n!hkA+uFu`DnT`owz`kmk}MLDH8 zk)XCf(1chGeA0Yj|8T6I%qMJ_bE`bmr7&qH31((JN9j^mGCV}p@A61L&_-{M_1C^0 zudU%!$lL1$SFj0Ez=azb<$zqrChXZI+k|FU5U95k@wgBwQs8u(!|Lni5IF+9)ANWYe%1h(yGQj{JEm&`NAed`Qt75+%|%pkrw#Y9 zZ|i9VBLDI3Nm2k=NtN*j4=S!POECW2BtBcChW8`o+gALIz`MBVV$pkEZtv5w3aadk zvjN`e4~A5P-E@6xBi=YZSZZI{IULKb=x%;jL}Z#SNPqWQtt+ZIUQX50AcvRAxFf;ch_Rf{KRyxHvRM9Kj|tjp)#R)LbBahV6@9^4m>& zNQ=!-a-nlDq3lHNGa_QY^_ z6kbYA1+#GAppaG|VF?>^@i8O_3*xad)ojmK@|J5iY*au1Sl{y0Qa(ESKmXPzzpk7! 
zkP=_Z&{3HM(+&ADI^-qXnsNcZ=v3OuKh4_2=e4P|csdCHxt}Fk5+9c_pORS=uI8G* z4Wqdy8#<0E9OEB2;dRbtxrFVt1pFR{UEn9^uq){q!N$k@{$d_7x;=NhF3o0&Oj2MJ zWLrKQbU5rj6T|D&3(w8RFKMvd+P1M{G@jG%g_c5H4@?IpVT!vA#PUShPIewyjla;F zwXzHCP~9#AOLv@kge4i~cNqyDc{?pKUFS?#t{jii?~aO&HZS!?ZCDv$)W|Tawu}A7 zx{f;B^7%r33lh;%$-(Up$o+HV0>ww#L~bdj3+Y{bQ@*tMlF52MKO9wM=s#~2?fG?O zhyb*Y+sFr(35@x$vMFbTb=oA3iS={$n+X7Q704nAFDLMVrZNw^MtX9aMrvc4$#Bm^ zINf-rBp7x%#Pt;Q8%na_i%`>IiNg^1tR=+tT`kwH@Io)M{VZvs-4xm^0ND)ML|WGI z$XygP{PhYFOx$EnGCS>{byIXD^XL~XRbD-WiSQp5qxj`)}SWTE&=qr8E0Za}kq z(C(MnE;wb6pq@&Tu!^zE>@`{|D(DjcSC)_U2uKS&dC-M};xq-81P!#%kSrEBAltU{ z2PzJ;C3^|PUI!(^`Cao5zg5v+>sxe0Ydv#ytA8m?Y_(Zu&l~s(03EQBYId^TXW{&5 zRaG91vwV(p5AL^W`!mN>Tz%-EZ`@02@2# zW=@h9UJSMB=0HhaJ?}1>RD;4cR3CNDv(Ng%C3!qDqK(p&c?ek+R5di&4g8gr=({M> zK(?VL9&8BATTk56O;Ys5qh_FIe&#z@5-{;zC~1SCGCK{o6)081Nb*)C*5az?_$50F zQmdare-gtobbvpMgScrsxoVIto7&Edkko!;HUnL}oxr+=0Z9!9iG0=5Cp%D>!h*N& zpDmu5Yp8Sq?lF(dxL0Q&H6hS5A+T1&i9j$N^h-;7vOW{D*=|;^XIKk_5NA)hM=*)w zum)ypC3dv3>xF(F8~x*L7DkU$VO|Vk)%*rGWd8;)`j6#$z{4=Ptg#6uU<^W7Aq@i3 z9IUK2O&K>S4+H{#iN1LtXv{b87XB~8cH=tKa;`Fwz>i{U@PksXr%#ql9_S8$2XaP^&6nU zGq4)nt00tIIi$udqETJRB&_!`OsnjGGFs=TLPBx zp7oaa0SNM7fV{Lu5eKsY0~Auy647PxYZIoRdN-MT1Lz`HweAT*0|Q7V_Jem5>Ci~S z`HPF4ewZf)EGK(Jh(OFD7>vWM44n^Ao}AcxR2aLAK7=GBo0k-@4N=n;)@f^q!x)iV#JW`wI12sFnhDrjt)@faAkphu-uyP{b;Hn^~iGEli6E3LZJqsH%URAG< zW7mCG34Z~fN#tjy*+a9DTE`KSI-akOug(T_KD50?M%VkKi1xr=L>8!DGIFX)WVrbI zcNZ!ti3*7^a`N`73X)kGRory?d}SRY6rroV;*n=&L1Tu}?`1}YL0zUSW|yd^S!tfX zdnQEOKP;E6>RpcXK!+yn&8dP|Ay8?`kME2*|IrAf!Lj~WH(=gPV+`l4s7X_uCbnAI z)~&iDI+j$Ajpoi(}fzNEK5M*&Z|d^Y-#afxb{Hdo6SODGYTXjoKM>dIrU zbY4uTdPq-_Sh`WqRGJ738E@L+E0qK!z<(yJ^3}#K?B^Az0#BC*37;JD*k~8paK+E% z0iOpmti_Hd>4=9zxnk)|w13Vl{b{t!`ZS8#Ch#%FIgGMPn?6P=K-Ea=u~X>soa6%e z7YXZc;v4Dzvr_@Io$8V^B7`hdY${MyT6;zzN+rj-)TmGIhNFEdFk znYG?Yx^i2ZTfFT;zwfO#@U7av$s zd|z^SN6Ry~@KihhhY1lDpIiFiM3`>RlulCD?9&Zf2QNZ>HP%RSf!!mVZj2Xk?O}NK z&;%t$75!W1Ue8y(CwNYCF_|6CpBjZZBm}V6ax4^v$1}_x6O0hWxjv1uNX)G}>OHS) zLa80kV)Sm}DG|OWP!l);M!NXKlz(>aPm!VGgmL%j{ES=X7ny)by4obCJc66a% zq${N@2(=3$c)uL;rQ3)KfW~rL3;I|IL^c(@1?HZE|xq07AzZ*f4_TI%J5A&4|U<_`Fmo!iA|CwA|Y z3cG)j3xQ(3`scX+^5E|P9?0s_M^(&YS^biR17|yPi#K&^_dX7H&5e;EsL6N@s8dCn zx$%F5lW9T(3&GJEl7hfx)r-sE3jtzB(A`5z1A`9KQaixnT8DR^?Y8Vz&$5AQ%>tIO z5oy?ld?=PZVZU+MsAWUN+k7L`8|*G4mGd$xU(Z-j`}%^Vv*OxB93@Y;602i&)edYI z&hKA!%Ny(&Bo{|LQ2)&7E5xHT`4GFGYH@TCG@ z|NQ(QC>^(r;$@k+{A-vSk<_+-m!}{@R_zh0&1>w_o|1mIMOvVmwWHB3@tg1{P$@Km zuH!kfN60oRonEtPfyJw3cpa}mW?hoIl$P}jhK<1D)F7b`AdCH>oZvg``RK`d;-F3E z%@;}mVj^+mKu5ST@vm~nI}mre6#+32OF33&?OiA2GI=!NM&arhfLDC$!51Dd$=V>~ z#E&bdM1F6Hz*>hCs}hRw_w(Z9Km$KZ9+6#b$aY*O=C~LO?-f&fS$a5Q85_pzaJz4< z7ObdG+^*<{Om=fv47nibzPYjid^~s1T^2ppbfkt^+go{b7je)Eske8s>eT(e7TMJKg?de*;t9WG6ELQJ*^Ma{pRy3KseMU5P5AK3s{NuJ~b{+S=uh*ETwZ>K_anD+TIP6zjzFnX|AHK>nNuL z>2%pxa%cF9uE;0T(1L1BxvY5}u!79H`ZYn0m&CXKSO%mM(d`Or@vBb37y=H9`DOBZ z5Hi?y(VPscV&PS6vwq+2KjsGxV7bvUBR3P0v+*)e+}cRx=YHC)a88pDM$Uj<$$EK~ z@J#mjcjN(8(VjkxYknQZKs{^`UN&pTX&!~Yzu$56-^eMZU+q{nlmI~JGw+SrxSpYl zQ)*&-7f1?}gOJR5kh_tZ#)^?l#;W_8Ktd9Yu;m|FM1}tW?M64qw<7{RGzYj9xIsPs0oZnI zHz@b3*%lD_3O5W&I~e-NzlzpRPjH__BGZbY5lJ?nU}a|II?P-`T%vNIm+<}7xV5l{ zlfPNJQ#&SkyZvAEqTP$^riYtq`})W*!4T%m;%&;zP1XsS0(Dzw?jj{AeKRXf|7Gmf zGuDY1YDy5@)lO3-Ol`!J>9z6dPn#%(8`gyxPbJU)ld5p_;>Fnny$LN2zRfIZkPW-F zz%Zpo4bZbA?hIf7CX1@9!7iwen)tu+W}68eKl3c2L^nq1Lc1IB_6Y9S3OGhr*I?)r} z0b-ePslUk(bh540#F~U2P_({vw(NBK>L3mvCe5kiW7)C>kc6bHJWzy&)`Qzs{pYR? 
z;`Sr}<&{5_@@->P&=UP_YF268*%T!livEEF=pQE<-s3+`1dwrx5{CiY-ngzb*|6)A zdTmEU@;DqswKjV_9_+4zvWgPt!~IMja8$<8ymS}(lpfVBeg3~wiNBZi?m97kcPJ>d zT_)99$Tch#k3quE>gE(;!yH60!3zFq4_I37y*sV=w{Vc9GFKkEV>(7O9;Yd8v3Ve7 zGk6$kdV6$0CNCB*Vs;JUiv3-2>#oYQ$AriTxU;EbhZdRo?L9=`7$}$c)+}PeW$rqA z(3xAioo0Tlm0J9qd^VPv`F0l2aE&uC}K=?E;UwvB%N zXuvgc?}qx_a&om5>550%&UaL~75e8(?#?D7CP0XGvY(YSuqjB_aDZ>aoJtbSI`By` z*@^&r!7xRPNt@&4=Dt9E=OnEVNhijI36hw%5H zrp-rTgjXUBnj6=--a*YT&*t%94avYoBF$9#MV=gZO^Y;k{VLM5Qs=cAI`G#P>Ml9W zB38E|qhRaDBTT1M$CZBkB=WDg8;N)%&%7{(e(+TPBJExW5$<{o17LYOYB*B1cG}FT zVPpwj4J3>}A+Ljvq52*PoDU-ll|#m39BWF%42hDin3!`>_(@S18Ld{-DU#%h(g{Xp z6w)ohaxS`)rRdR7tqT0@A2h<1<8k5NKf7H&pif*g8a5j*4FvlLr(7f9cZ_P90QVYE zz(4)Gv9%KDE%lBzIhW3xTpARWsrq@SjE8PV=#5eo(@PEM<*1@wB?m=uPRVqV`?YuZ z4Rk)ZF3hkY2D6%J<1RGt&akg`X>0V>Cw3r8sp63I5pfMlBpU$k9G#VfXf|M*e`{>Id{1qi- zb5bV4-=>pJF+DobtE8v==eRS-rjU4M1NA*nQe!DN0W0GjGjBrzzp8NyucP!6Me0D+ z4#?;Z*Hnmou8&A#GWL796SrH3;IQn!wH7K-VZ<=x;VU}b#}7*+$<2Vn&1Y2E;_ z3$*?mLOcmtL5c^pqJ%tkzbVD!_aC|y$^*$!^fr2#sao`-xVgieFZRV(qtq#{&Wy2E z&Q^DIyd4Rfe>RR)*U&DOn z3L9zBOT(sBsYv)OKh@&ZH~0En(3z_ZYYTNB}MGlJzYcEGnpR>D8GQftxH$EHKKp! zJlB!Na~%eul9}xBv^1AGMl)kbe5q`+`lMCAQrE&VS{>58v=8zQw{{c=Lth^!e`?A) zjYYj>qC~=6mVsibRNYp4(wO^w(yhn(g7md)|G5>vO7@|=T~ae@>`EvNlqY8P`O^gJ z@sBxtUS>yCYkx-+rS?f7C$YvQFN`Smg~O&`V z4UTt8ho)jcvPGd(nz`I4C{NRmoUhY99$)Pe8t#D4a2T^sIzbVoa(x`0M``e z$@BaHEIjibI5*xvHY8{2aQBX1;ULEBHBRhb%(}?MQjUXCMCGSig0aY+Klx5s5r;@+Qs$K{h~%|^5mFm z?%|b1K7C^rxjz4Zk-q!SHB-o4=;^&{~`N3=wX ztYZ;c1ci0+Lu~4UY*f9f23d)wU>`Hv|ISsHSe2$01oFJ`*9@uZWf0iMQI#9da0gLL zmtJO4L1h{qvqw9JTGB+J?>pS12waS~K9RhIOr-Oa%!}S2wDwY;1Z#Zem4mAk_UOem zHLO*hMuyhS|F^b8^`F|(;!n;0LeV$+mUB7533(2iiQ3J5P0G9TA51Iax0HCw&c;^0 zI>ly#r^E$~t60p*6FU|DZxFUq+^{mw@vvQPg>D892_8Y~uFk!blWPQ(Cc^oh$ zSTXHA|7_N>@XPDFw};MjEL^ANkyP|Esc)RoM2qSA9*3ScaB}eX>xZ9pj3%j>gi{u5 z5N450!js`=SMp!3hp-3%dGu=s58dX@Y!XR|Z*Dxs_sH>3b2KuurHp*8q76_CkaY79 z^l0$68T!KRF#|fwQBDZ6AbzWHy14Y3jMSfLrYXL8Zhy{li(wJth8uoyE{zWHtZGmZ zhq%s>y~mDoobGe^j+6gO&cbZlZ98c}!xBs3t`Y+suCmk(uJpo9(}81`ek!XQ#t?Dq zQ<~q4m^M~DkDklV{hqDE*4hI`Ga7R%=*?K>>9u?LLh~h*G~n|tV$1dGx(sOS;K)($ z8cprQRHd$OHR5}t;WJg*HHT_fRXoGL-C8$WOEwLZvUd3%$Gft##RpcW^2H5|kp{MTXUvERrX)>e(q zP#z{k;y=tWF8B1TDN8D4M+NaC3shg1lwq?&5P?66RCkviMjf`CeMjb4hRVRRC^}(A zl7V8uf7Zs8))lU@CX+)9k0+N#+>thx7NF`|7B+@YHt=430o3@Ssh_t32!?>~Uxd?U z0Y&_I?SW$XG71UeV*SRo2KWU}oho8~Qs5#H#nvNtPf^`EAodYl-+X-9I?Q_aMlWvJb*N6JJ(#wYia zKzWm)_}@Kl_^I*#IGfY$c8)RfWXhTDRY}iN1H&Fy0{zhk3E*uUc)E!mJx3ulKpowu z)zq_o+cgoD`C&a#;cmOy?F&#JA%lj$XO)tEIb9450X9>MQbqvu$Sn=gbE$t*CS`Pu zjLQ;EN9h_F!bUy2PSac*OO&EKLkK%xjq4*b%KGNnfbcF&2nVm{Lke9AqTJ(}yg~CP z0_-!-(O9eXvXLQJHc*WX3;ZrQB^cSW#p7zNwWnh-L`kwK$4PcJZDvJ7#qB~FRfE_> zDYDDMiC3!uf7ZR|nU2=I%$XLj9CltqwRppP_McXteYE6_4YZ}fGfHpXYTel7QpYVe zX_XLueK2)qelssx-SFx%%6cB@5+%cX(J#5l6pk4hSt(03h<~L|G&ctqt9{A(R1-4s= zzUY2)=1hXPLD%1Wn=Rl#H$-i|-cFv5H#&WlZ8^{I=Xn_{U1R)Ls@JT<-yxa#R7X@7JACdo$CtM|Kc2ppe!2N_aq;k3_(4>~ z01ou^FM}*T*?&n)*RK0*t`)}Y>Tj{XM~2-HFY&+k#NZdm8{ZpZ7@WN&bFBL5xaAP| zvMOim2U1-Ko6(?kW7*L=r5L3lN(=&h)3vjJ$;TPdcy~kjllX^ZQC6wO_GILePQnk2 zTi!NAu`1(Jt5m8OmwanS2|>eoEj$ocmS62kI`ZOVQtgp*6I;)p*6J;LKs($8Y zWyz|4Cby-DV(@5vWjta$Jvd453esdvv_W~+k$n9-S;$&}htFGIJJi}O5+@8G>0AC; zn*+vUVhr{#MFN-VqMiz44t!^b_-g#N=wY=}RvgXhL~N<80ot5BQq$1!TsAuvp``{U zhxu%Z2l9)@o2I_X00h~8jBT_nYxE-Vm zVmkse&WvKkcIX@Yc9gG~&J{+05*VYBk!m|PZFyugU3zm(#I=$z%L=_1IvlEj?jiry zOCe}tPi+9{PV7Zw8TOqR_`N1@g@e$<@pR!6JK7LmzNF;(^&?cGuba)vi-ba$ z!7U#h|AIuLS-Uos#~ChdY;|uSs^GU(g)YdP4T5(2{Ar&6-j-N@D%p(_;R-aQ;Gt%t zIr!pl5CgsW9yz2P4dU+;pD}Y-ZX%4KrLFM@GHQ^ab8nwDi=hx1jOzI65bc4!^3Gw> zr>&`t@tO7qJX~}@kn4ReRE$KDoyzHJGZW6PuTEscAV>1v6=cvpOHTe<+9jJ-{RCu$ 
Date: Thu, 5 Sep 2024 23:46:37 +0000
Subject: [PATCH 056/108] chore: release main (#13065)

:robot: I have created a release *beep* *boop*
---
google-apps-chat: 0.1.10

## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05)


### Features

* [google-apps-chat] Add CHAT_SPACE link type support for GA launch ([#13064](https://github.com/googleapis/google-cloud-python/issues/13064)) ([0ee300a](https://github.com/googleapis/google-cloud-python/commit/0ee300a0497968aa2c85969924b37f95f67675f0))
---
This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
---
 .release-please-manifest.json                              | 2 +-
 packages/google-apps-chat/CHANGELOG.md                     | 7 +++++++
 .../google-apps-chat/google/apps/chat/gapic_version.py     | 2 +-
 .../google-apps-chat/google/apps/chat_v1/gapic_version.py  | 2 +-
 .../generated_samples/snippet_metadata_google.chat.v1.json | 2 +-
 5 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 477ae9480c55..d64187a095df 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -5,7 +5,7 @@
   "packages/google-analytics-admin": "0.23.0",
   "packages/google-analytics-data": "0.18.11",
   "packages/google-apps-card": "0.1.4",
-  "packages/google-apps-chat": "0.1.9",
+  "packages/google-apps-chat": "0.1.10",
   "packages/google-apps-events-subscriptions": "0.1.2",
   "packages/google-apps-meet": "0.1.8",
   "packages/google-apps-script-type": "0.3.10",
diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md
index ec2485ff2775..2ed2b6a49d98 100644
--- a/packages/google-apps-chat/CHANGELOG.md
+++ b/packages/google-apps-chat/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05)
+
+
+### Features
+
+* [google-apps-chat] Add CHAT_SPACE link type support for GA launch ([#13064](https://github.com/googleapis/google-cloud-python/issues/13064)) ([0ee300a](https://github.com/googleapis/google-cloud-python/commit/0ee300a0497968aa2c85969924b37f95f67675f0))
+
 ## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.8...google-apps-chat-v0.1.9) (2024-07-30)
diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py
index 558c8aab67c5..9413c3341313 100644
--- a/packages/google-apps-chat/google/apps/chat/gapic_version.py
+++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "0.0.0"  # {x-release-please-version}
+__version__ = "0.1.10"  # {x-release-please-version}
diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py
index 558c8aab67c5..9413c3341313 100644
--- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py
+++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "0.0.0"  # {x-release-please-version}
+__version__ = "0.1.10"  # {x-release-please-version}
diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json
index 6e637d46d014..4442b6c5505a 100644
--- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json
+++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json
@@ -8,7 +8,7 @@
     ],
     "language": "PYTHON",
     "name": "google-apps-chat",
-    "version": "0.1.0"
+    "version": "0.1.10"
   },
   "snippets": [
     {

From a7f0dbfb8a5f3518cf1acec86b7bc3a6151a811b Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 6 Sep 2024 15:45:13 -0400
Subject: [PATCH 057/108] docs: [google-maps-fleetengine-delivery] update comment link for ListTasks filter (#13066)

- [ ] Regenerate this pull request now.

PiperOrigin-RevId: 671458761

Source-Link: https://github.com/googleapis/googleapis/commit/d3029316f8793ac5178dfbd1ebd366b80e32dd6c

Source-Link: https://github.com/googleapis/googleapis-gen/commit/fe4884a9d2013647c34ddcd4f8df1d1d88c9eed4
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtZmxlZXRlbmdpbmUtZGVsaXZlcnkvLk93bEJvdC55YW1sIiwiaCI6ImZlNDg4NGE5ZDIwMTM2NDdjMzRkZGNkNGY4ZGYxZDFkODhjOWVlZDQifQ==

---------

Co-authored-by: Owl Bot
---
 .../google/maps/fleetengine_delivery_v1/types/delivery_api.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py
index a8e773aae506..cd619910ac96 100644
--- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py
+++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/delivery_api.py
@@ -509,7 +509,7 @@ class ListTasksRequest(proto.Message):
             don't specify a value, or if you filter on an empty string,
             then all Tasks are returned. For information about the Task
             properties that you can filter on, see `List
-            tasks `__.
+            tasks `__.
     """
 
     header: mfd_header.DeliveryRequestHeader = proto.Field(

From 1a2b325c0da966131072673e06d17015b16c7a1a Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 6 Sep 2024 19:17:09 -0400
Subject: [PATCH 058/108] feat: [google-cloud-documentai] Add API fields for the descriptions of entity type and property in the document schema (#13067)

- [ ] Regenerate this pull request now.
PiperOrigin-RevId: 671472365

Source-Link: https://github.com/googleapis/googleapis/commit/003e62665190becd32d722a82ed3cab62696225f

Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf0196fe2004e1fbb1edf5aa8d8ada653e10d62c
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRvY3VtZW50YWkvLk93bEJvdC55YW1sIiwiaCI6ImJmMDE5NmZlMjAwNGUxZmJiMWVkZjVhYThkOGFkYTY1M2UxMGQ2MmMifQ==

---------

Co-authored-by: Owl Bot
Co-authored-by: Anthonios Partheniou
---
 .../documentai_v1beta3/types/document_schema.py  | 16 ++++++++++++++++
 .../documentai_v1beta3/types/document_service.py |  4 +---
 .../client-post-processing/doc-formatting.yaml   |  1 +
 .../documentai_v1beta3/test_document_service.py  |  2 ++
 .../client-post-processing/doc-formatting.yaml   | 15 +++++++++++++--
 5 files changed, 33 insertions(+), 5 deletions(-)
 create mode 120000 packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml

diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py
index 60d49d8c76b1..1c37739aca48 100644
--- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py
+++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py
@@ -189,6 +189,10 @@ class EntityType(proto.Message):
                 type. For example ``line_item/amount``. This convention
                 is deprecated, but will still be honored for backward
                 compatibility.
+            description (str):
+                The description of the entity type. Could be
+                used to provide more information about the
+                entity type for model calls.
             base_types (MutableSequence[str]):
                 The entity type that this type is derived
                 from. For now, one and only one should be set.
@@ -220,6 +224,10 @@ class Property(proto.Message):
                 name (str):
                     The name of the property. Follows the same
                     guidelines as the EntityType name.
+                description (str):
+                    The description of the property. Could be
+                    used to provide more information about the
+                    property for model calls.
                 display_name (str):
                     User defined name for the property.
                 value_type (str):
@@ -274,6 +282,10 @@ class OccurrenceType(proto.Enum):
                 proto.STRING,
                 number=1,
             )
+            description: str = proto.Field(
+                proto.STRING,
+                number=7,
+            )
             display_name: str = proto.Field(
                 proto.STRING,
                 number=6,
@@ -309,6 +321,10 @@ class OccurrenceType(proto.Enum):
             proto.STRING,
             number=1,
         )
+        description: str = proto.Field(
+            proto.STRING,
+            number=15,
+        )
         base_types: MutableSequence[str] = proto.RepeatedField(
             proto.STRING,
             number=2,
diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py
index 5fd1139ddcfb..86af095d60ab 100644
--- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py
+++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py
@@ -396,10 +396,8 @@ class ListDocumentsRequest(proto.Message):
             https://google.aip.dev/160.
 
             Currently support query strings are:
-            ------------------------------------
-
-            ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED``
 
+            - ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED``
             - ``LabelingState=DOCUMENT_LABELED|DOCUMENT_UNLABELED|DOCUMENT_AUTO_LABELED``
             - ``DisplayName=\"file_name.pdf\"``
             - ``EntityType=abc/def``
diff --git a/packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml
new file mode 120000
index 000000000000..6e0991666f97
--- /dev/null
+++ b/packages/google-cloud-documentai/scripts/client-post-processing/doc-formatting.yaml
@@ -0,0 +1 @@
+../../../../scripts/client-post-processing/doc-formatting.yaml
\ No newline at end of file
diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py
index 7233ce2c91c1..a8e8294d811e 100644
--- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py
+++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py
@@ -6117,10 +6117,12 @@ def test_update_dataset_schema_rest(request_type):
                 "enum_values": {"values": ["values_value1", "values_value2"]},
                 "display_name": "display_name_value",
                 "name": "name_value",
+                "description": "description_value",
                 "base_types": ["base_types_value1", "base_types_value2"],
                 "properties": [
                     {
                         "name": "name_value",
+                        "description": "description_value",
                         "display_name": "display_name_value",
                         "value_type": "value_type_value",
                         "occurrence_type": 1,
diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml
index d95e1c4fc542..88dd09382f64 100644
--- a/scripts/client-post-processing/doc-formatting.yaml
+++ b/scripts/client-post-processing/doc-formatting.yaml
@@ -161,9 +161,20 @@ replacements:
     after: " 'ingestionTime': DOUBLE; (UNIX timestamp)\n 'application': STRING;\n"
     count: 2
   - paths: [
-    packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py,
+      packages/google-cloud-visionai/google/cloud/visionai_v1/types/platform.py,
     ]
     before: "'processor': STRING;\n }\n dynamic_config_input_topic "
     after: "'processor': STRING;\n\n }\n\n dynamic_config_input_topic "
     count: 1
-
+  - paths: [
+      packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py,
+    ]
+    before: |
+      \n Currently support query strings are:
+      \ ------------------------------------
+      \
+      \ ``SplitType=DATASET_SPLIT_TEST\|DATASET_SPLIT_TRAIN\|DATASET_SPLIT_UNASSIGNED``
+      \
+      \ - ``LabelingState=DOCUMENT_LABELED\|DOCUMENT_UNLABELED\|DOCUMENT_AUTO_LABELED``
+    after: "\n Currently support query strings are:\n\n - ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED``\n - ``LabelingState=DOCUMENT_LABELED|DOCUMENT_UNLABELED|DOCUMENT_AUTO_LABELED``\n"
+    count: 1

From 31b38106f13113f205d6e25ac802754d611979da Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Mon, 9 Sep 2024 07:53:33 -0400
Subject: [PATCH 059/108] chore: release main (#13069)

:robot: I have created a release *beep* *boop*
---
google-cloud-documentai: 2.32.0 ## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.31.0...google-cloud-documentai-v2.32.0) (2024-09-06) ### Features * [google-cloud-documentai] Add API fields for the descriptions of entity type and property in the document schema ([#13067](https://github.com/googleapis/google-cloud-python/issues/13067)) ([1a2b325](https://github.com/googleapis/google-cloud-python/commit/1a2b325c0da966131072673e06d17015b16c7a1a))
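As a quick illustration of the schema change above, the sketch below shows how the new `description` fields on `DocumentSchema.EntityType` (proto field 15) and its nested `Property` (proto field 7) might be populated. It assumes the types are importable from `google.cloud.documentai_v1beta3` as in earlier releases; the entity names, descriptions, and value types are made-up sample values, not part of the change.

```python
# Illustrative sketch only: populating the ``description`` fields added in 2.32.0.
# Entity/property names, descriptions, and value types are invented sample values.
from google.cloud import documentai_v1beta3 as documentai

line_item = documentai.DocumentSchema.EntityType(
    name="line_item",
    display_name="Line item",
    # New in this release: EntityType.description (proto field 15).
    description="A single billable row extracted from an invoice.",
    base_types=["object"],
    properties=[
        documentai.DocumentSchema.EntityType.Property(
            name="amount",
            display_name="Amount",
            # New in this release: Property.description (proto field 7).
            description="The monetary value of the line item.",
            value_type="money",
            occurrence_type=(
                documentai.DocumentSchema.EntityType.Property.OccurrenceType.OPTIONAL_ONCE
            ),
        )
    ],
)

schema = documentai.DocumentSchema(
    display_name="Invoice schema",
    entity_types=[line_item],
)
print(schema)
```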
google-maps-fleetengine-delivery: 0.2.4 ## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.2.3...google-maps-fleetengine-delivery-v0.2.4) (2024-09-06) ### Documentation * [google-maps-fleetengine-delivery] update comment link for ListTasks filter ([#13066](https://github.com/googleapis/google-cloud-python/issues/13066)) ([a7f0dbf](https://github.com/googleapis/google-cloud-python/commit/a7f0dbfb8a5f3518cf1acec86b7bc3a6151a811b))
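The fleetengine-delivery entry above only updates a documentation link for the `ListTasks` filter. For context, the sketch below shows roughly where that filter string is supplied; it assumes the generated `DeliveryServiceClient` and `ListTasksRequest` surface in `google.maps.fleetengine_delivery_v1`, and the provider ID and filter expression are placeholders rather than values taken from this change.

```python
# Illustrative sketch only: listing tasks with the filter whose documentation
# link is updated above. Provider ID and filter expression are placeholders.
from google.maps import fleetengine_delivery_v1 as delivery

client = delivery.DeliveryServiceClient()

request = delivery.ListTasksRequest(
    parent="providers/sample-provider",  # placeholder provider resource name
    filter='state = "CLOSED"',  # placeholder; see the linked docs for supported filter syntax
    page_size=50,
)

for task in client.list_tasks(request=request):
    print(task.name)
```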
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 4 ++-- packages/google-cloud-documentai/CHANGELOG.md | 7 +++++++ .../google/cloud/documentai/gapic_version.py | 2 +- .../google/cloud/documentai_v1/gapic_version.py | 2 +- .../google/cloud/documentai_v1beta2/gapic_version.py | 2 +- .../google/cloud/documentai_v1beta3/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.documentai.v1.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta2.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta3.json | 2 +- packages/google-maps-fleetengine-delivery/CHANGELOG.md | 7 +++++++ .../google/maps/fleetengine_delivery/gapic_version.py | 2 +- .../google/maps/fleetengine_delivery_v1/gapic_version.py | 2 +- .../snippet_metadata_maps.fleetengine.delivery.v1.json | 2 +- 13 files changed, 26 insertions(+), 12 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index d64187a095df..a23882e898d2 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -78,7 +78,7 @@ "packages/google-cloud-discoveryengine": "0.12.2", "packages/google-cloud-dlp": "3.22.0", "packages/google-cloud-dms": "1.9.5", - "packages/google-cloud-documentai": "2.31.0", + "packages/google-cloud-documentai": "2.32.0", "packages/google-cloud-domains": "1.7.5", "packages/google-cloud-edgecontainer": "0.5.11", "packages/google-cloud-edgenetwork": "0.1.11", @@ -179,7 +179,7 @@ "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", "packages/google-maps-fleetengine": "0.2.2", - "packages/google-maps-fleetengine-delivery": "0.2.3", + "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", "packages/google-maps-places": "0.1.17", "packages/google-maps-routeoptimization": "0.1.2", diff --git a/packages/google-cloud-documentai/CHANGELOG.md b/packages/google-cloud-documentai/CHANGELOG.md index 8b3db92af5a1..5a26f2674ae3 100644 --- a/packages/google-cloud-documentai/CHANGELOG.md +++ b/packages/google-cloud-documentai/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.31.0...google-cloud-documentai-v2.32.0) (2024-09-06) + + +### Features + +* [google-cloud-documentai] Add API fields for the descriptions of entity type and property in the document schema ([#13067](https://github.com/googleapis/google-cloud-python/issues/13067)) ([1a2b325](https://github.com/googleapis/google-cloud-python/commit/1a2b325c0da966131072673e06d17015b16c7a1a)) + ## [2.31.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.30.0...google-cloud-documentai-v2.31.0) (2024-08-08) diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 96d60af285a2..2fc98b45f209 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json index ef56bd7e3eb3..31e4348ff0cb 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index f47545a8ed3e..43bcd3c8902f 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-maps-fleetengine-delivery/CHANGELOG.md b/packages/google-maps-fleetengine-delivery/CHANGELOG.md index cf4766e90920..5c90dc3fdce5 100644 --- a/packages/google-maps-fleetengine-delivery/CHANGELOG.md +++ b/packages/google-maps-fleetengine-delivery/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.2.3...google-maps-fleetengine-delivery-v0.2.4) (2024-09-06) + + +### Documentation + +* [google-maps-fleetengine-delivery] update comment link for ListTasks filter ([#13066](https://github.com/googleapis/google-cloud-python/issues/13066)) ([a7f0dbf](https://github.com/googleapis/google-cloud-python/commit/a7f0dbfb8a5f3518cf1acec86b7bc3a6151a811b)) + ## [0.2.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.2.2...google-maps-fleetengine-delivery-v0.2.3) (2024-07-30) diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py index 558c8aab67c5..668eac0d72ce 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py index 558c8aab67c5..668eac0d72ce 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json index 884ac226d0e4..685d1554024c 100644 --- a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json +++ b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine-delivery", - "version": "0.1.0" + "version": "0.2.4" }, "snippets": [ { From ae583456fd0a2f2e46ce8759918ebe3fc19a0fe8 Mon Sep 17 00:00:00 2001 From: "owlbot-bootstrapper[bot]" <104649659+owlbot-bootstrapper[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 13:46:09 -0700 Subject: [PATCH 060/108] feat: add initial files for google.maps.areainsights.v1 (#13078) Source-Link: https://github.com/googleapis/googleapis-gen/commit/802f7c8cdf887527e99fa9c0d774adfd33a16ffe Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtYXJlYWluc2lnaHRzLy5Pd2xCb3QueWFtbCIsImgiOiI4MDJmN2M4Y2RmODg3NTI3ZTk5ZmE5YzBkNzc0YWRmZDMzYTE2ZmZlIn0= PiperOrigin-RevId: 672562643 --------- Co-authored-by: Owlbot Bootstrapper Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google-maps-areainsights/.OwlBot.yaml | 18 + packages/google-maps-areainsights/.coveragerc | 13 + packages/google-maps-areainsights/.flake8 | 33 + packages/google-maps-areainsights/.gitignore | 63 + .../.repo-metadata.json | 17 + .../google-maps-areainsights/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../google-maps-areainsights/CONTRIBUTING.rst | 271 ++ packages/google-maps-areainsights/LICENSE | 202 ++ packages/google-maps-areainsights/MANIFEST.in | 25 + packages/google-maps-areainsights/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../google-maps-areainsights/docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../docs/areainsights_v1/area_insights.rst | 6 + .../docs/areainsights_v1/services_.rst | 6 + .../docs/areainsights_v1/types_.rst | 6 + .../google-maps-areainsights/docs/conf.py | 384 +++ .../google-maps-areainsights/docs/index.rst | 23 + .../docs/multiprocessing.rst | 7 + .../google/maps/areainsights/__init__.py | 51 + .../google/maps/areainsights/gapic_version.py | 17 + .../google/maps/areainsights/py.typed | 2 + .../google/maps/areainsights_v1/__init__.py | 48 + .../maps/areainsights_v1/gapic_metadata.json | 43 + .../maps/areainsights_v1/gapic_version.py | 17 + .../google/maps/areainsights_v1/py.typed | 2 + .../maps/areainsights_v1/services/__init__.py | 15 + .../services/area_insights/__init__.py | 22 + 
.../services/area_insights/async_client.py | 353 +++ .../services/area_insights/client.py | 773 ++++++ .../area_insights/transports/__init__.py | 36 + .../services/area_insights/transports/base.py | 173 ++ .../services/area_insights/transports/grpc.py | 286 ++ .../area_insights/transports/grpc_asyncio.py | 305 +++ .../services/area_insights/transports/rest.py | 313 +++ .../maps/areainsights_v1/types/__init__.py | 40 + .../types/area_insights_service.py | 510 ++++ packages/google-maps-areainsights/mypy.ini | 3 + packages/google-maps-areainsights/noxfile.py | 452 ++++ ...ed_area_insights_compute_insights_async.py | 52 + ...ted_area_insights_compute_insights_sync.py | 52 + ..._metadata_google.maps.areainsights.v1.json | 168 ++ .../scripts/decrypt-secrets.sh | 46 + .../scripts/fixup_areainsights_v1_keywords.py | 176 ++ packages/google-maps-areainsights/setup.py | 95 + .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../tests/__init__.py | 15 + .../tests/unit/__init__.py | 15 + .../tests/unit/gapic/__init__.py | 15 + .../unit/gapic/areainsights_v1/__init__.py | 15 + .../areainsights_v1/test_area_insights.py | 2315 +++++++++++++++++ release-please-config.json | 10 + 60 files changed, 7828 insertions(+) create mode 100644 packages/google-maps-areainsights/.OwlBot.yaml create mode 100644 packages/google-maps-areainsights/.coveragerc create mode 100644 packages/google-maps-areainsights/.flake8 create mode 100644 packages/google-maps-areainsights/.gitignore create mode 100644 packages/google-maps-areainsights/.repo-metadata.json create mode 100644 packages/google-maps-areainsights/CHANGELOG.md create mode 100644 packages/google-maps-areainsights/CODE_OF_CONDUCT.md create mode 100644 packages/google-maps-areainsights/CONTRIBUTING.rst create mode 100644 packages/google-maps-areainsights/LICENSE create mode 100644 packages/google-maps-areainsights/MANIFEST.in create mode 100644 packages/google-maps-areainsights/README.rst create mode 120000 packages/google-maps-areainsights/docs/CHANGELOG.md create mode 120000 packages/google-maps-areainsights/docs/README.rst create mode 100644 packages/google-maps-areainsights/docs/_static/custom.css create mode 100644 packages/google-maps-areainsights/docs/_templates/layout.html create mode 100644 packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst create mode 100644 packages/google-maps-areainsights/docs/areainsights_v1/services_.rst create mode 100644 packages/google-maps-areainsights/docs/areainsights_v1/types_.rst create mode 100644 packages/google-maps-areainsights/docs/conf.py create mode 100644 packages/google-maps-areainsights/docs/index.rst create mode 100644 packages/google-maps-areainsights/docs/multiprocessing.rst create mode 100644 packages/google-maps-areainsights/google/maps/areainsights/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights/py.typed create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py create mode 100644 
packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py create mode 100644 packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py create mode 100644 packages/google-maps-areainsights/mypy.ini create mode 100644 packages/google-maps-areainsights/noxfile.py create mode 100644 packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py create mode 100644 packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py create mode 100644 packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json create mode 100755 packages/google-maps-areainsights/scripts/decrypt-secrets.sh create mode 100644 packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py create mode 100644 packages/google-maps-areainsights/setup.py create mode 100644 packages/google-maps-areainsights/testing/.gitignore create mode 100644 packages/google-maps-areainsights/testing/constraints-3.10.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.11.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.12.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.7.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.8.txt create mode 100644 packages/google-maps-areainsights/testing/constraints-3.9.txt create mode 100644 packages/google-maps-areainsights/tests/__init__.py create mode 100644 packages/google-maps-areainsights/tests/unit/__init__.py create mode 100644 packages/google-maps-areainsights/tests/unit/gapic/__init__.py create mode 100644 packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py create mode 100644 packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py diff --git a/packages/google-maps-areainsights/.OwlBot.yaml b/packages/google-maps-areainsights/.OwlBot.yaml new file mode 100644 index 000000000000..35589d0d6923 --- /dev/null +++ b/packages/google-maps-areainsights/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/maps/areainsights/(v.*)/.*-py + dest: /owl-bot-staging/google-maps-areainsights/$1 +api-name: google-maps-areainsights diff --git a/packages/google-maps-areainsights/.coveragerc b/packages/google-maps-areainsights/.coveragerc new file mode 100644 index 000000000000..612fd0382a68 --- /dev/null +++ b/packages/google-maps-areainsights/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/maps/areainsights/__init__.py + google/maps/areainsights/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-maps-areainsights/.flake8 b/packages/google-maps-areainsights/.flake8 new file mode 100644 index 000000000000..32986c79287a --- /dev/null +++ b/packages/google-maps-areainsights/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-maps-areainsights/.gitignore b/packages/google-maps-areainsights/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-maps-areainsights/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-maps-areainsights/.repo-metadata.json b/packages/google-maps-areainsights/.repo-metadata.json new file mode 100644 index 000000000000..2b94b1bde887 --- /dev/null +++ b/packages/google-maps-areainsights/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-maps-areainsights", + "name_pretty": "Places Insights API", + "api_description": "Places Insights API. 
", + "product_documentation": "https://developers.google.com/maps/documentation/places-insights", + "client_documentation": "https://googleapis.dev/python/google-maps-areainsights/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1624013&template=2026178", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-maps-areainsights", + "api_id": "areainsights.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "areainsights" +} diff --git a/packages/google-maps-areainsights/CHANGELOG.md b/packages/google-maps-areainsights/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-maps-areainsights/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-maps-areainsights/CODE_OF_CONDUCT.md b/packages/google-maps-areainsights/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-maps-areainsights/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-maps-areainsights/CONTRIBUTING.rst b/packages/google-maps-areainsights/CONTRIBUTING.rst new file mode 100644 index 000000000000..f47c4696d0d8 --- /dev/null +++ b/packages/google-maps-areainsights/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. 
+ +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. 
+ Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-maps-areainsights + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-maps-areainsights/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-maps-areainsights/LICENSE b/packages/google-maps-areainsights/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-maps-areainsights/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-maps-areainsights/MANIFEST.in b/packages/google-maps-areainsights/MANIFEST.in new file mode 100644 index 000000000000..d6814cd60037 --- /dev/null +++ b/packages/google-maps-areainsights/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-maps-areainsights/README.rst b/packages/google-maps-areainsights/README.rst new file mode 100644 index 000000000000..4f7db1c05b22 --- /dev/null +++ b/packages/google-maps-areainsights/README.rst @@ -0,0 +1,108 @@ +Python Client for Places Insights API +===================================== + +|preview| |pypi| |versions| + +`Places Insights API`_: Places Insights API. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-maps-areainsights.svg + :target: https://pypi.org/project/google-maps-areainsights/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-maps-areainsights.svg + :target: https://pypi.org/project/google-maps-areainsights/ +.. _Places Insights API: https://developers.google.com/maps/documentation/places-insights +.. _Client Library Documentation: https://googleapis.dev/python/google-maps-areainsights/latest +.. _Product Documentation: https://developers.google.com/maps/documentation/places-insights + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Places Insights API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Places Insights API.: https://developers.google.com/maps/documentation/places-insights +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. 
_end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-maps-areainsights + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-maps-areainsights + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Places Insights API + to see other available methods on the client. +- Read the `Places Insights API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Places Insights API Product documentation: https://developers.google.com/maps/documentation/places-insights +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-maps-areainsights/docs/CHANGELOG.md b/packages/google-maps-areainsights/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-maps-areainsights/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-maps-areainsights/docs/README.rst b/packages/google-maps-areainsights/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-maps-areainsights/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-maps-areainsights/docs/_static/custom.css b/packages/google-maps-areainsights/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-maps-areainsights/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-maps-areainsights/docs/_templates/layout.html b/packages/google-maps-areainsights/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-maps-areainsights/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+             As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+             Library versions released prior to that date will continue to be available. For more information please
+             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst b/packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst new file mode 100644 index 000000000000..78835e4cf335 --- /dev/null +++ b/packages/google-maps-areainsights/docs/areainsights_v1/area_insights.rst @@ -0,0 +1,6 @@ +AreaInsights +------------------------------ + +.. automodule:: google.maps.areainsights_v1.services.area_insights + :members: + :inherited-members: diff --git a/packages/google-maps-areainsights/docs/areainsights_v1/services_.rst b/packages/google-maps-areainsights/docs/areainsights_v1/services_.rst new file mode 100644 index 000000000000..54fbea792518 --- /dev/null +++ b/packages/google-maps-areainsights/docs/areainsights_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Maps Areainsights v1 API +============================================ +.. toctree:: + :maxdepth: 2 + + area_insights diff --git a/packages/google-maps-areainsights/docs/areainsights_v1/types_.rst b/packages/google-maps-areainsights/docs/areainsights_v1/types_.rst new file mode 100644 index 000000000000..4774a4b375c1 --- /dev/null +++ b/packages/google-maps-areainsights/docs/areainsights_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Maps Areainsights v1 API +========================================= + +.. automodule:: google.maps.areainsights_v1.types + :members: + :show-inheritance: diff --git a/packages/google-maps-areainsights/docs/conf.py b/packages/google-maps-areainsights/docs/conf.py new file mode 100644 index 000000000000..89eaf10bb31d --- /dev/null +++ b/packages/google-maps-areainsights/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-maps-areainsights documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-maps-areainsights" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-maps-areainsights", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-maps-areainsights-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-maps-areainsights.tex", + "google-maps-areainsights Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-maps-areainsights", + "google-maps-areainsights Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-maps-areainsights", + "google-maps-areainsights Documentation", + author, + "google-maps-areainsights", + "google-maps-areainsights Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-maps-areainsights/docs/index.rst b/packages/google-maps-areainsights/docs/index.rst new file mode 100644 index 000000000000..edf03f49e17f --- /dev/null +++ b/packages/google-maps-areainsights/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + areainsights_v1/services_ + areainsights_v1/types_ + + +Changelog +--------- + +For a list of all ``google-maps-areainsights`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-maps-areainsights/docs/multiprocessing.rst b/packages/google-maps-areainsights/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-maps-areainsights/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-maps-areainsights/google/maps/areainsights/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights/__init__.py new file mode 100644 index 000000000000..aeff01ed5cae --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
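The ``multiprocessing`` note above amounts to one rule: construct the gRPC-backed client inside the worker process, not in the parent before the fork. A minimal sketch of that pattern, assuming application-default credentials and the documented ``INSIGHT_PLACES`` request (the worker function and pool size are illustrative):

.. code-block:: python

    import multiprocessing

    from google.maps import areainsights_v1


    def compute_in_worker(_):
        # Create the client *after* the worker process exists, per the note above.
        client = areainsights_v1.AreaInsightsClient()
        request = areainsights_v1.ComputeInsightsRequest(insights=["INSIGHT_PLACES"])
        response = client.compute_insights(request=request)
        # Return a plain string so the result is trivially picklable.
        return str(response)


    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(compute_in_worker, range(2)))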
+# +from google.maps.areainsights import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.maps.areainsights_v1.services.area_insights.async_client import ( + AreaInsightsAsyncClient, +) +from google.maps.areainsights_v1.services.area_insights.client import AreaInsightsClient +from google.maps.areainsights_v1.types.area_insights_service import ( + ComputeInsightsRequest, + ComputeInsightsResponse, + Filter, + Insight, + LocationFilter, + OperatingStatus, + PlaceInsight, + PriceLevel, + RatingFilter, + TypeFilter, +) + +__all__ = ( + "AreaInsightsClient", + "AreaInsightsAsyncClient", + "ComputeInsightsRequest", + "ComputeInsightsResponse", + "Filter", + "LocationFilter", + "PlaceInsight", + "RatingFilter", + "TypeFilter", + "Insight", + "OperatingStatus", + "PriceLevel", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py new file mode 100644 index 000000000000..caeec5b9e887 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-areainsights/google/maps/areainsights/py.typed b/packages/google-maps-areainsights/google/maps/areainsights/py.typed new file mode 100644 index 000000000000..b098dc9b9f40 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-areainsights package uses inline types. diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py new file mode 100644 index 000000000000..4d2ba8c829c8 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
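For context on the re-exports above: the versionless ``google.maps.areainsights`` namespace simply forwards to the ``areainsights_v1`` classes, so either import path can be used. A small sketch, assuming default credentials; the request mirrors the generated sample later in this patch:

.. code-block:: python

    from google.maps import areainsights, areainsights_v1

    # Both namespaces expose the same client class object.
    assert areainsights.AreaInsightsClient is areainsights_v1.AreaInsightsClient

    client = areainsights.AreaInsightsClient()
    request = areainsights.ComputeInsightsRequest(insights=["INSIGHT_PLACES"])
    response = client.compute_insights(request=request)
    print(response)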
+# +from google.maps.areainsights_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.area_insights import AreaInsightsAsyncClient, AreaInsightsClient +from .types.area_insights_service import ( + ComputeInsightsRequest, + ComputeInsightsResponse, + Filter, + Insight, + LocationFilter, + OperatingStatus, + PlaceInsight, + PriceLevel, + RatingFilter, + TypeFilter, +) + +__all__ = ( + "AreaInsightsAsyncClient", + "AreaInsightsClient", + "ComputeInsightsRequest", + "ComputeInsightsResponse", + "Filter", + "Insight", + "LocationFilter", + "OperatingStatus", + "PlaceInsight", + "PriceLevel", + "RatingFilter", + "TypeFilter", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json new file mode 100644 index 000000000000..413146689761 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.maps.areainsights_v1", + "protoPackage": "google.maps.areainsights.v1", + "schema": "1.0", + "services": { + "AreaInsights": { + "clients": { + "grpc": { + "libraryClient": "AreaInsightsClient", + "rpcs": { + "ComputeInsights": { + "methods": [ + "compute_insights" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AreaInsightsAsyncClient", + "rpcs": { + "ComputeInsights": { + "methods": [ + "compute_insights" + ] + } + } + }, + "rest": { + "libraryClient": "AreaInsightsClient", + "rpcs": { + "ComputeInsights": { + "methods": [ + "compute_insights" + ] + } + } + } + } + } + } +} diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py new file mode 100644 index 000000000000..caeec5b9e887 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed b/packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed new file mode 100644 index 000000000000..b098dc9b9f40 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-areainsights package uses inline types. 
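As the ``gapic_metadata.json`` above records, the single ``ComputeInsights`` RPC is surfaced as ``compute_insights`` over three transports (``grpc``, ``grpc-async``, ``rest``). A hedged sketch of selecting a transport explicitly; gRPC remains the default when none is given:

.. code-block:: python

    from google.maps import areainsights_v1

    # Default construction uses the gRPC transport.
    grpc_client = areainsights_v1.AreaInsightsClient()

    # The REST transport can be requested by name at construction time.
    rest_client = areainsights_v1.AreaInsightsClient(transport="rest")

    request = areainsights_v1.ComputeInsightsRequest(insights=["INSIGHT_PLACES"])
    print(rest_client.compute_insights(request=request))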
diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py new file mode 100644 index 000000000000..8630700a35b9 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AreaInsightsAsyncClient +from .client import AreaInsightsClient + +__all__ = ( + "AreaInsightsClient", + "AreaInsightsAsyncClient", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py new file mode 100644 index 000000000000..9a362d2be7e9 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/async_client.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
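Before the asynchronous client definition below, a short usage sketch: it wraps the generated ``compute_insights`` sample in ``asyncio.run`` so it can be driven from synchronous code (credentials are assumed to come from the environment):

.. code-block:: python

    import asyncio

    from google.maps import areainsights_v1


    async def sample_compute_insights():
        client = areainsights_v1.AreaInsightsAsyncClient()
        request = areainsights_v1.ComputeInsightsRequest(insights=["INSIGHT_PLACES"])
        response = await client.compute_insights(request=request)
        print(response)


    if __name__ == "__main__":
        asyncio.run(sample_compute_insights())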
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.areainsights_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .client import AreaInsightsClient +from .transports.base import DEFAULT_CLIENT_INFO, AreaInsightsTransport +from .transports.grpc_asyncio import AreaInsightsGrpcAsyncIOTransport + + +class AreaInsightsAsyncClient: + """Service definition for the Places Insights API.""" + + _client: AreaInsightsClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = AreaInsightsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AreaInsightsClient._DEFAULT_UNIVERSE + + place_path = staticmethod(AreaInsightsClient.place_path) + parse_place_path = staticmethod(AreaInsightsClient.parse_place_path) + common_billing_account_path = staticmethod( + AreaInsightsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AreaInsightsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AreaInsightsClient.common_folder_path) + parse_common_folder_path = staticmethod(AreaInsightsClient.parse_common_folder_path) + common_organization_path = staticmethod(AreaInsightsClient.common_organization_path) + parse_common_organization_path = staticmethod( + AreaInsightsClient.parse_common_organization_path + ) + common_project_path = staticmethod(AreaInsightsClient.common_project_path) + parse_common_project_path = staticmethod( + AreaInsightsClient.parse_common_project_path + ) + common_location_path = staticmethod(AreaInsightsClient.common_location_path) + parse_common_location_path = staticmethod( + AreaInsightsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AreaInsightsAsyncClient: The constructed client. + """ + return AreaInsightsClient.from_service_account_info.__func__(AreaInsightsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + AreaInsightsAsyncClient: The constructed client. + """ + return AreaInsightsClient.from_service_account_file.__func__(AreaInsightsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AreaInsightsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AreaInsightsTransport: + """Returns the transport used by the client instance. + + Returns: + AreaInsightsTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = AreaInsightsClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AreaInsightsTransport, Callable[..., AreaInsightsTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the area insights async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AreaInsightsTransport,Callable[..., AreaInsightsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the AreaInsightsTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AreaInsightsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def compute_insights( + self, + request: Optional[ + Union[area_insights_service.ComputeInsightsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> area_insights_service.ComputeInsightsResponse: + r"""Compute Insights RPC + + This method lets you retrieve insights about areas using a + variaty of filter such as: area, place type, operating status, + price level and ratings. Currently "count" and "places" insights + are supported. With "count" insights you can answer questions + such as "How many restaurant are located in California that are + operational, are inexpensive and have an average rating of at + least 4 stars" (see ``insight`` enum for more details). With + "places" insights, you can determine which places match the + requested filter. Clients can then use those place resource + names to fetch more details about each individual place using + the Places API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import areainsights_v1 + + async def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsAsyncClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = await client.compute_insights(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.areainsights_v1.types.ComputeInsightsRequest, dict]]): + The request object. Request for the ComputeInsights RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.areainsights_v1.types.ComputeInsightsResponse: + Response for the ComputeInsights RPC. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, area_insights_service.ComputeInsightsRequest): + request = area_insights_service.ComputeInsightsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.compute_insights + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "AreaInsightsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AreaInsightsAsyncClient",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py new file mode 100644 index 000000000000..8fdceb253313 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/client.py @@ -0,0 +1,773 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
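The synchronous client below reads its endpoint, API key, and universe-domain settings from ``client_options``. A hedged sketch of the two most common knobs: the API key value is a placeholder, and the mTLS endpoint shown is the one the client itself would derive when ``GOOGLE_API_USE_MTLS_ENDPOINT=always``:

.. code-block:: python

    from google.api_core.client_options import ClientOptions

    from google.maps import areainsights_v1

    # Override the default "areainsights.googleapis.com" endpoint explicitly.
    options = ClientOptions(api_endpoint="areainsights.mtls.googleapis.com")
    client = areainsights_v1.AreaInsightsClient(client_options=options)

    # An API key can be passed instead of credentials; supplying both raises
    # ValueError ("client_options.api_key and credentials are mutually exclusive").
    key_client = areainsights_v1.AreaInsightsClient(
        client_options=ClientOptions(api_key="YOUR_API_KEY")  # placeholder
    )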
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.areainsights_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .transports.base import DEFAULT_CLIENT_INFO, AreaInsightsTransport +from .transports.grpc import AreaInsightsGrpcTransport +from .transports.grpc_asyncio import AreaInsightsGrpcAsyncIOTransport +from .transports.rest import AreaInsightsRestTransport + + +class AreaInsightsClientMeta(type): + """Metaclass for the AreaInsights client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[AreaInsightsTransport]] + _transport_registry["grpc"] = AreaInsightsGrpcTransport + _transport_registry["grpc_asyncio"] = AreaInsightsGrpcAsyncIOTransport + _transport_registry["rest"] = AreaInsightsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AreaInsightsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AreaInsightsClient(metaclass=AreaInsightsClientMeta): + """Service definition for the Places Insights API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "areainsights.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "areainsights.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AreaInsightsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AreaInsightsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AreaInsightsTransport: + """Returns the transport used by the client instance. + + Returns: + AreaInsightsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def place_path( + place_id: str, + ) -> str: + """Returns a fully-qualified place string.""" + return "places/{place_id}".format( + place_id=place_id, + ) + + @staticmethod + def parse_place_path(path: str) -> Dict[str, str]: + """Parses a place path into its component segments.""" + m = re.match(r"^places/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + 
project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = AreaInsightsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or AreaInsightsClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AreaInsightsTransport, Callable[..., AreaInsightsTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the area insights client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AreaInsightsTransport,Callable[..., AreaInsightsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AreaInsightsTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = AreaInsightsClient._read_environment_variables() + self._client_cert_source = AreaInsightsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = AreaInsightsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError(
+                "client_options.api_key and credentials are mutually exclusive"
+            )
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, AreaInsightsTransport)
+        if transport_provided:
+            # transport is an AreaInsightsTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError(
+                    "When providing a transport instance, "
+                    "provide its credentials directly."
+                )
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = cast(AreaInsightsTransport, transport)
+            self._api_endpoint = self._transport.host
+
+        self._api_endpoint = self._api_endpoint or AreaInsightsClient._get_api_endpoint(
+            self._client_options.api_endpoint,
+            self._client_cert_source,
+            self._universe_domain,
+            self._use_mtls_endpoint,
+        )
+
+        if not transport_provided:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(
+                google.auth._default, "get_api_key_credentials"
+            ):
+                credentials = google.auth._default.get_api_key_credentials(
+                    api_key_value
+                )
+
+            transport_init: Union[
+                Type[AreaInsightsTransport], Callable[..., AreaInsightsTransport]
+            ] = (
+                AreaInsightsClient.get_transport_class(transport)
+                if isinstance(transport, str) or transport is None
+                else cast(Callable[..., AreaInsightsTransport], transport)
+            )
+            # initialize with the provided callable or the passed in class
+            self._transport = transport_init(
+                credentials=credentials,
+                credentials_file=self._client_options.credentials_file,
+                host=self._api_endpoint,
+                scopes=self._client_options.scopes,
+                client_cert_source_for_mtls=self._client_cert_source,
+                quota_project_id=self._client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=self._client_options.api_audience,
+            )
+
+    def compute_insights(
+        self,
+        request: Optional[
+            Union[area_insights_service.ComputeInsightsRequest, dict]
+        ] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> area_insights_service.ComputeInsightsResponse:
+        r"""Compute Insights RPC
+
+        This method lets you retrieve insights about areas using a
+        variety of filters such as: area, place type, operating status,
+        price level and ratings. Currently "count" and "places" insights
+        are supported. With "count" insights you can answer questions
+        such as "How many restaurants are located in California that are
+        operational, are inexpensive and have an average rating of at
+        least 4 stars" (see ``insight`` enum for more details). With
+        "places" insights, you can determine which places match the
+        requested filter. Clients can then use those place resource
+        names to fetch more details about each individual place using
+        the Places API.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import areainsights_v1 + + def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = client.compute_insights(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.areainsights_v1.types.ComputeInsightsRequest, dict]): + The request object. Request for the ComputeInsights RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.areainsights_v1.types.ComputeInsightsResponse: + Response for the ComputeInsights RPC. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, area_insights_service.ComputeInsightsRequest): + request = area_insights_service.ComputeInsightsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.compute_insights] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AreaInsightsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AreaInsightsClient",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py new file mode 100644 index 000000000000..5a454cdd5e69 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
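The generated ``sample_compute_insights`` snippet above leaves the request mostly empty. As a complement, here is a minimal editorial sketch (not part of the generated patch) that builds an explicit ``Filter`` from the ``LocationFilter``, ``TypeFilter`` and enum types introduced later in this PR; the coordinates, radius and place type are illustrative placeholders, and the usual GAPIC top-level re-exports from ``areainsights_v1`` are assumed.

```python
# Editorial sketch only -- not part of the generated patch.
# Counts operational restaurants inside an assumed 2 km circle; values are placeholders.
from google.maps import areainsights_v1
from google.type import latlng_pb2


def count_operational_restaurants() -> int:
    # Uses Application Default Credentials, like the generated sample above.
    client = areainsights_v1.AreaInsightsClient()

    request = areainsights_v1.ComputeInsightsRequest(
        insights=["INSIGHT_COUNT"],
        filter=areainsights_v1.Filter(
            location_filter=areainsights_v1.LocationFilter(
                circle=areainsights_v1.LocationFilter.Circle(
                    lat_lng=latlng_pb2.LatLng(latitude=37.7749, longitude=-122.4194),
                    radius=2000,  # meters
                )
            ),
            type_filter=areainsights_v1.TypeFilter(included_types=["restaurant"]),
            operating_status=["OPERATING_STATUS_OPERATIONAL"],
        ),
    )

    # INSIGHT_COUNT populates the ``count`` field of the response.
    return client.compute_insights(request=request).count
```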
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AreaInsightsTransport +from .grpc import AreaInsightsGrpcTransport +from .grpc_asyncio import AreaInsightsGrpcAsyncIOTransport +from .rest import AreaInsightsRestInterceptor, AreaInsightsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AreaInsightsTransport]] +_transport_registry["grpc"] = AreaInsightsGrpcTransport +_transport_registry["grpc_asyncio"] = AreaInsightsGrpcAsyncIOTransport +_transport_registry["rest"] = AreaInsightsRestTransport + +__all__ = ( + "AreaInsightsTransport", + "AreaInsightsGrpcTransport", + "AreaInsightsGrpcAsyncIOTransport", + "AreaInsightsRestTransport", + "AreaInsightsRestInterceptor", +) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py new file mode 100644 index 000000000000..06ab292d9102 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.areainsights_v1 import gapic_version as package_version +from google.maps.areainsights_v1.types import area_insights_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AreaInsightsTransport(abc.ABC): + """Abstract transport class for AreaInsights.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "areainsights.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.compute_insights: gapic_v1.method.wrap_method( + self.compute_insights, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
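``_prep_wrapped_messages`` above gives ``compute_insights`` a default retry policy (1 s initial backoff, 1.3 multiplier, retrying only ``ServiceUnavailable``, 120 s deadline and timeout). Callers can override this per call; a hedged sketch, not part of the patch, reusing the same ``google.api_core`` retry primitives:

```python
# Editorial sketch only -- not part of the generated patch.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.maps import areainsights_v1

client = areainsights_v1.AreaInsightsClient()

# A real request also needs the required ``filter`` field; omitted here for brevity.
request = areainsights_v1.ComputeInsightsRequest(insights=["INSIGHT_PLACES"])

response = client.compute_insights(
    request=request,
    # Tighter deadline than the generated default, still retrying only
    # transient ServiceUnavailable errors.
    retry=retries.Retry(
        initial=0.5,
        maximum=5.0,
        multiplier=2.0,
        deadline=30.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    ),
    timeout=30.0,
)
```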
+ """ + raise NotImplementedError() + + @property + def compute_insights( + self, + ) -> Callable[ + [area_insights_service.ComputeInsightsRequest], + Union[ + area_insights_service.ComputeInsightsResponse, + Awaitable[area_insights_service.ComputeInsightsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AreaInsightsTransport",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py new file mode 100644 index 000000000000..9f02613a9af3 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .base import DEFAULT_CLIENT_INFO, AreaInsightsTransport + + +class AreaInsightsGrpcTransport(AreaInsightsTransport): + """gRPC backend transport for AreaInsights. + + Service definition for the Places Insights API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
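When the channel is built up front, the transport accepts a ready-made channel and then ignores the credentials arguments, as the constructor above shows. A hedged sketch, not part of the patch; ``create_channel`` still resolves Application Default Credentials unless credentials are passed in:

```python
# Editorial sketch only -- not part of the generated patch.
from google.maps import areainsights_v1
from google.maps.areainsights_v1.services.area_insights.transports import (
    AreaInsightsGrpcTransport,
)

# create_channel resolves Application Default Credentials unless some are provided.
channel = AreaInsightsGrpcTransport.create_channel("areainsights.googleapis.com")
transport = AreaInsightsGrpcTransport(channel=channel)
client = areainsights_v1.AreaInsightsClient(transport=transport)
```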
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def compute_insights(
+        self,
+    ) -> Callable[
+        [area_insights_service.ComputeInsightsRequest],
+        area_insights_service.ComputeInsightsResponse,
+    ]:
+        r"""Return a callable for the compute insights method over gRPC.
+
+        Compute Insights RPC
+
+        This method lets you retrieve insights about areas using a
+        variety of filters such as: area, place type, operating status,
+        price level and ratings. Currently "count" and "places" insights
+        are supported. With "count" insights you can answer questions
+        such as "How many restaurants are located in California that are
+        operational, are inexpensive and have an average rating of at
+        least 4 stars" (see ``insight`` enum for more details). With
+        "places" insights, you can determine which places match the
+        requested filter. Clients can then use those place resource
+        names to fetch more details about each individual place using
+        the Places API.
+
+        Returns:
+            Callable[[~.ComputeInsightsRequest],
+                    ~.ComputeInsightsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "compute_insights" not in self._stubs:
+            self._stubs["compute_insights"] = self.grpc_channel.unary_unary(
+                "/google.maps.areainsights.v1.AreaInsights/ComputeInsights",
+                request_serializer=area_insights_service.ComputeInsightsRequest.serialize,
+                response_deserializer=area_insights_service.ComputeInsightsResponse.deserialize,
+            )
+        return self._stubs["compute_insights"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("AreaInsightsGrpcTransport",)
diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..2f5fa839a9ed
--- /dev/null
+++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/grpc_asyncio.py
@@ -0,0 +1,305 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
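The registry in ``transports/__init__.py`` (earlier in this patch) maps the strings "grpc", "grpc_asyncio" and "rest" to these transport classes, and the client accepts the string directly. A short editorial sketch, not part of the patch:

```python
# Editorial sketch only -- not part of the generated patch.
from google.maps import areainsights_v1

grpc_client = areainsights_v1.AreaInsightsClient(transport="grpc")
rest_client = areainsights_v1.AreaInsightsClient(transport="rest")

print(grpc_client.transport.kind)  # -> "grpc"
print(rest_client.transport.kind)  # -> "rest"
```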
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.maps.areainsights_v1.types import area_insights_service + +from .base import DEFAULT_CLIENT_INFO, AreaInsightsTransport +from .grpc import AreaInsightsGrpcTransport + + +class AreaInsightsGrpcAsyncIOTransport(AreaInsightsTransport): + """gRPC AsyncIO backend transport for AreaInsights. + + Service definition for the Places Insights API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. 
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def compute_insights(
+        self,
+    ) -> Callable[
+        [area_insights_service.ComputeInsightsRequest],
+        Awaitable[area_insights_service.ComputeInsightsResponse],
+    ]:
+        r"""Return a callable for the compute insights method over gRPC.
+
+        Compute Insights RPC
+
+        This method lets you retrieve insights about areas using a
+        variety of filters such as: area, place type, operating status,
+        price level and ratings. Currently "count" and "places" insights
+        are supported. With "count" insights you can answer questions
+        such as "How many restaurants are located in California that are
+        operational, are inexpensive and have an average rating of at
+        least 4 stars" (see ``insight`` enum for more details). With
+        "places" insights, you can determine which places match the
+        requested filter. Clients can then use those place resource
+        names to fetch more details about each individual place using
+        the Places API.
+
+        Returns:
+            Callable[[~.ComputeInsightsRequest],
+                    Awaitable[~.ComputeInsightsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "compute_insights" not in self._stubs: + self._stubs["compute_insights"] = self.grpc_channel.unary_unary( + "/google.maps.areainsights.v1.AreaInsights/ComputeInsights", + request_serializer=area_insights_service.ComputeInsightsRequest.serialize, + response_deserializer=area_insights_service.ComputeInsightsResponse.deserialize, + ) + return self._stubs["compute_insights"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.compute_insights: gapic_v1.method_async.wrap_method( + self.compute_insights, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AreaInsightsGrpcAsyncIOTransport",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py new file mode 100644 index 000000000000..c1fa1d2c5628 --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/services/area_insights/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.maps.areainsights_v1.types import area_insights_service + +from .base import AreaInsightsTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AreaInsightsRestInterceptor: + """Interceptor for AreaInsights. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AreaInsightsRestTransport. + + .. code-block:: python + class MyCustomAreaInsightsInterceptor(AreaInsightsRestInterceptor): + def pre_compute_insights(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_compute_insights(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AreaInsightsRestTransport(interceptor=MyCustomAreaInsightsInterceptor()) + client = AreaInsightsClient(transport=transport) + + + """ + + def pre_compute_insights( + self, + request: area_insights_service.ComputeInsightsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[area_insights_service.ComputeInsightsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for compute_insights + + Override in a subclass to manipulate the request or metadata + before they are sent to the AreaInsights server. + """ + return request, metadata + + def post_compute_insights( + self, response: area_insights_service.ComputeInsightsResponse + ) -> area_insights_service.ComputeInsightsResponse: + """Post-rpc interceptor for compute_insights + + Override in a subclass to manipulate the response + after it is returned by the AreaInsights server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AreaInsightsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AreaInsightsRestInterceptor + + +class AreaInsightsRestTransport(AreaInsightsTransport): + """REST backend transport for AreaInsights. + + Service definition for the Places Insights API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "areainsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AreaInsightsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'areainsights.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AreaInsightsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _ComputeInsights(AreaInsightsRestStub): + def __hash__(self): + return hash("ComputeInsights") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: area_insights_service.ComputeInsightsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> area_insights_service.ComputeInsightsResponse: + r"""Call the compute insights method over HTTP. + + Args: + request (~.area_insights_service.ComputeInsightsRequest): + The request object. Request for the ComputeInsights RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.area_insights_service.ComputeInsightsResponse: + Response for the ComputeInsights RPC. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1:computeInsights", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_compute_insights( + request, metadata + ) + pb_request = area_insights_service.ComputeInsightsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = area_insights_service.ComputeInsightsResponse() + pb_resp = area_insights_service.ComputeInsightsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_compute_insights(resp) + return resp + + @property + def compute_insights( + self, + ) -> Callable[ + [area_insights_service.ComputeInsightsRequest], + area_insights_service.ComputeInsightsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ComputeInsights(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AreaInsightsRestTransport",) diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py new file mode 100644 index 000000000000..52bf7638e04e --- /dev/null +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
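The ``AreaInsightsRestInterceptor`` docstring earlier in ``rest.py`` sketches the pre/post hook pattern; below is a slightly fuller, hedged version of that same example (not part of the patch) that wires a logging interceptor into a client.

```python
# Editorial sketch only -- not part of the generated patch; it expands the
# example from the AreaInsightsRestInterceptor docstring above.
import logging

from google.maps import areainsights_v1
from google.maps.areainsights_v1.services.area_insights.transports import (
    AreaInsightsRestInterceptor,
    AreaInsightsRestTransport,
)


class LoggingAreaInsightsInterceptor(AreaInsightsRestInterceptor):
    def pre_compute_insights(self, request, metadata):
        logging.info("ComputeInsights request: %s", request)
        return request, metadata

    def post_compute_insights(self, response):
        logging.info("ComputeInsights response: %s", response)
        return response


transport = AreaInsightsRestTransport(interceptor=LoggingAreaInsightsInterceptor())
client = areainsights_v1.AreaInsightsClient(transport=transport)
```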
+#
+from .area_insights_service import (
+    ComputeInsightsRequest,
+    ComputeInsightsResponse,
+    Filter,
+    Insight,
+    LocationFilter,
+    OperatingStatus,
+    PlaceInsight,
+    PriceLevel,
+    RatingFilter,
+    TypeFilter,
+)
+
+__all__ = (
+    "ComputeInsightsRequest",
+    "ComputeInsightsResponse",
+    "Filter",
+    "LocationFilter",
+    "PlaceInsight",
+    "RatingFilter",
+    "TypeFilter",
+    "Insight",
+    "OperatingStatus",
+    "PriceLevel",
+)
diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py
new file mode 100644
index 000000000000..fa6d48a92d48
--- /dev/null
+++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/types/area_insights_service.py
@@ -0,0 +1,510 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.type import latlng_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.maps.areainsights.v1",
+    manifest={
+        "Insight",
+        "OperatingStatus",
+        "PriceLevel",
+        "ComputeInsightsRequest",
+        "ComputeInsightsResponse",
+        "PlaceInsight",
+        "Filter",
+        "LocationFilter",
+        "TypeFilter",
+        "RatingFilter",
+    },
+)
+
+
+class Insight(proto.Enum):
+    r"""Supported insights.
+
+    Values:
+        INSIGHT_UNSPECIFIED (0):
+            Not Specified.
+        INSIGHT_COUNT (1):
+            Count insight.
+
+            When this insight is specified ComputeInsights returns the
+            number of places that match the specified filter criteria.
+
+            ::
+
+               For example if the request is:
+               ComputeInsightsRequest {
+                 insights: INSIGHT_COUNT
+                 filter {
+                   location_filter {region: }
+                   type_filter {included_types: "restaurant"}
+                   operating_status: OPERATING_STATUS_OPERATIONAL
+                   price_levels: PRICE_LEVEL_FREE
+                   price_levels: PRICE_LEVEL_INEXPENSIVE
+                   min_rating: 4.0
+                 }
+               }
+
+               The method will return the count of restaurants in California that are
+               operational, with price level free or inexpensive and have an average
+               rating of at least 4 stars.
+
+               Example response:
+               ComputeInsightsResponse {
+                 count:
+               }
+        INSIGHT_PLACES (2):
+            Return Places
+
+            When this insight is specified ComputeInsights returns
+            Places that match the specified filter criteria.
+
+            ::
+
+               For example if the request is:
+               ComputeInsightsRequest {
+                 insights: INSIGHT_PLACES
+                 filter {
+                   location_filter {region: }
+                   type_filter {included_types: "restaurant"}
+                   operating_status: OPERATING_STATUS_OPERATIONAL
+                   price_levels: PRICE_LEVEL_FREE
+                   price_levels: PRICE_LEVEL_INEXPENSIVE
+                   min_rating: 4.0
+                 }
+               }
+
+               The method will return a list of places of restaurants in
+               California that are operational, with price level free or inexpensive and
+               have an average rating of at least 4 stars.
+
+               Example response:
+               ComputeInsightsResponse {
+                 place_insights { place: "places/ABC" }
+                 place_insights { place: "places/PQR" }
+                 place_insights { place: "places/XYZ" }
+               }
+    """
+    INSIGHT_UNSPECIFIED = 0
+    INSIGHT_COUNT = 1
+    INSIGHT_PLACES = 2
+
+
+class OperatingStatus(proto.Enum):
+    r"""Operating status of the place.
+
+    Values:
+        OPERATING_STATUS_UNSPECIFIED (0):
+            Not Specified.
+        OPERATING_STATUS_OPERATIONAL (1):
+            The place is operational and it is open
+            during its defined hours.
+        OPERATING_STATUS_PERMANENTLY_CLOSED (3):
+            The Place is no longer in business.
+        OPERATING_STATUS_TEMPORARILY_CLOSED (4):
+            The Place is temporarily closed and expected
+            to reopen in the future.
+    """
+    OPERATING_STATUS_UNSPECIFIED = 0
+    OPERATING_STATUS_OPERATIONAL = 1
+    OPERATING_STATUS_PERMANENTLY_CLOSED = 3
+    OPERATING_STATUS_TEMPORARILY_CLOSED = 4
+
+
+class PriceLevel(proto.Enum):
+    r"""Price level of the place.
+
+    Values:
+        PRICE_LEVEL_UNSPECIFIED (0):
+            Place price level is unspecified or unknown.
+        PRICE_LEVEL_FREE (1):
+            Place provides free services.
+        PRICE_LEVEL_INEXPENSIVE (2):
+            Place provides inexpensive services.
+        PRICE_LEVEL_MODERATE (3):
+            Place provides moderately priced services.
+        PRICE_LEVEL_EXPENSIVE (4):
+            Place provides expensive services.
+        PRICE_LEVEL_VERY_EXPENSIVE (5):
+            Place provides very expensive services.
+    """
+    PRICE_LEVEL_UNSPECIFIED = 0
+    PRICE_LEVEL_FREE = 1
+    PRICE_LEVEL_INEXPENSIVE = 2
+    PRICE_LEVEL_MODERATE = 3
+    PRICE_LEVEL_EXPENSIVE = 4
+    PRICE_LEVEL_VERY_EXPENSIVE = 5
+
+
+class ComputeInsightsRequest(proto.Message):
+    r"""Request for the ComputeInsights RPC.
+
+    Attributes:
+        insights (MutableSequence[google.maps.areainsights_v1.types.Insight]):
+            Required. Insights to compute. Currently only INSIGHT_COUNT
+            and INSIGHT_PLACES are supported.
+        filter (google.maps.areainsights_v1.types.Filter):
+            Required. Insight filter.
+    """
+
+    insights: MutableSequence["Insight"] = proto.RepeatedField(
+        proto.ENUM,
+        number=4,
+        enum="Insight",
+    )
+    filter: "Filter" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message="Filter",
+    )
+
+
+class ComputeInsightsResponse(proto.Message):
+    r"""Response for the ComputeInsights RPC.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        count (int):
+            Result for Insights.INSIGHT_COUNT.
+
+            This field is a member of `oneof`_ ``_count``.
+        place_insights (MutableSequence[google.maps.areainsights_v1.types.PlaceInsight]):
+            Result for Insights.INSIGHT_PLACES.
+    """
+
+    count: int = proto.Field(
+        proto.INT64,
+        number=1,
+        optional=True,
+    )
+    place_insights: MutableSequence["PlaceInsight"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message="PlaceInsight",
+    )
+
+
+class PlaceInsight(proto.Message):
+    r"""Holds information about a place
+
+    Attributes:
+        place (str):
+            The resource name of a place. This resource name can be used
+            to retrieve details about the place using the `Places
+            API `__.
+    """
+
+    place: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class Filter(proto.Message):
+    r"""Filters for the ComputeInsights RPC.
+
+    Attributes:
+        location_filter (google.maps.areainsights_v1.types.LocationFilter):
+            Required. Restricts results to places which
+            are located in the area specified by location
+            filters.
+        type_filter (google.maps.areainsights_v1.types.TypeFilter):
+            Required. Place type filters.
+        operating_status (MutableSequence[google.maps.areainsights_v1.types.OperatingStatus]):
+            Optional.
Restricts results to places whose operating status + is included on this list. If operating_status is not set, + OPERATING_STATUS_OPERATIONAL is used as default. + price_levels (MutableSequence[google.maps.areainsights_v1.types.PriceLevel]): + Optional. Restricts results to places whose price level is + included on this list. If price_level is not set, all price + levels are included in the results. + rating_filter (google.maps.areainsights_v1.types.RatingFilter): + Optional. Restricts results to places whose average user + ratings are in the range specified by rating_filter. If + rating_filter is not set, all ratings are included in the + result. + """ + + location_filter: "LocationFilter" = proto.Field( + proto.MESSAGE, + number=1, + message="LocationFilter", + ) + type_filter: "TypeFilter" = proto.Field( + proto.MESSAGE, + number=2, + message="TypeFilter", + ) + operating_status: MutableSequence["OperatingStatus"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="OperatingStatus", + ) + price_levels: MutableSequence["PriceLevel"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="PriceLevel", + ) + rating_filter: "RatingFilter" = proto.Field( + proto.MESSAGE, + number=5, + message="RatingFilter", + ) + + +class LocationFilter(proto.Message): + r"""Location filters. + + Specifies the area of interest for the insight. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + circle (google.maps.areainsights_v1.types.LocationFilter.Circle): + Area as a circle. + + This field is a member of `oneof`_ ``area``. + region (google.maps.areainsights_v1.types.LocationFilter.Region): + Area as region. + + This field is a member of `oneof`_ ``area``. + custom_area (google.maps.areainsights_v1.types.LocationFilter.CustomArea): + Custom area specified by a polygon. + + This field is a member of `oneof`_ ``area``. + """ + + class Circle(proto.Message): + r"""A circle is defined by a center point and radius in meters. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lat_lng (google.type.latlng_pb2.LatLng): + The latitude and longitude of the center of + the circle. + + This field is a member of `oneof`_ ``center``. + place (str): + The Place resource name of the center of the + circle. Only point places are supported. + + This field is a member of `oneof`_ ``center``. + radius (int): + Optional. The radius of the circle in meters + """ + + lat_lng: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=1, + oneof="center", + message=latlng_pb2.LatLng, + ) + place: str = proto.Field( + proto.STRING, + number=2, + oneof="center", + ) + radius: int = proto.Field( + proto.INT32, + number=3, + ) + + class Region(proto.Message): + r"""A region is a geographic boundary such as: cities, postal + codes, counties, states, etc. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + place (str): + The Place resource name of a region. 
+
+            This field is a member of `oneof`_ ``region``.
+        """
+
+        place: str = proto.Field(
+            proto.STRING,
+            number=1,
+            oneof="region",
+        )
+
+    class CustomArea(proto.Message):
+        r"""Custom Area.
+
+        Attributes:
+            polygon (google.maps.areainsights_v1.types.LocationFilter.CustomArea.Polygon):
+                Required. The custom area represented as a
+                polygon.
+        """
+
+        class Polygon(proto.Message):
+            r"""A polygon is represented by a series of connected coordinates
+            in a counterclockwise ordered sequence. The coordinates form a
+            closed loop and define a filled region. The first and last
+            coordinates are equivalent, and they must contain identical
+            values. The format is a simplified version of GeoJSON polygons
+            (we only support one counterclockwise exterior ring).
+
+            Attributes:
+                coordinates (MutableSequence[google.type.latlng_pb2.LatLng]):
+                    Optional. The coordinates that define the
+                    polygon.
+            """
+
+            coordinates: MutableSequence[latlng_pb2.LatLng] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message=latlng_pb2.LatLng,
+            )
+
+        polygon: "LocationFilter.CustomArea.Polygon" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message="LocationFilter.CustomArea.Polygon",
+        )
+
+    circle: Circle = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof="area",
+        message=Circle,
+    )
+    region: Region = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="area",
+        message=Region,
+    )
+    custom_area: CustomArea = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof="area",
+        message=CustomArea,
+    )
+
+
+class TypeFilter(proto.Message):
+    r"""Place type filters.
+
+    Only Place types from `Table
+    a `__
+    are supported.
+
+    A place can only have a single primary type associated with it. For
+    example, the primary type might be "mexican_restaurant" or
+    "steak_house". Use included_primary_types and excluded_primary_types
+    to filter the results on a place's primary type.
+
+    A place can also have multiple type values associated with it. For
+    example, a restaurant might have the following types:
+    "seafood_restaurant", "restaurant", "food", "point_of_interest",
+    "establishment". Use included_types and excluded_types to filter the
+    results on the list of types associated with a place.
+
+    If a search is specified with multiple type restrictions, only
+    places that satisfy all of the restrictions are returned. For
+    example, if you specify {"included_types": ["restaurant"],
+    "excluded_primary_types": ["steak_house"]}, the returned places
+    provide "restaurant" related services but do not operate primarily
+    as a "steak_house".
+
+    If there are any conflicting types, i.e. a type appears in both
+    included_types and excluded_types, or in both included_primary_types
+    and excluded_primary_types, an INVALID_ARGUMENT error is returned.
+
+    One of included_types or included_primary_types must be set.
+
+    Attributes:
+        included_types (MutableSequence[str]):
+            Optional. Included Place types.
+        excluded_types (MutableSequence[str]):
+            Optional. Excluded Place types.
+        included_primary_types (MutableSequence[str]):
+            Optional. Included primary Place types.
+        excluded_primary_types (MutableSequence[str]):
+            Optional. Excluded primary Place types.
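+
+    Illustrative sketch only (assumes ``TypeFilter`` is re-exported from
+    ``areainsights_v1``, as the generated samples do for other types): the
+    filter from the example above, built with this message::
+
+        from google.maps import areainsights_v1
+
+        type_filter = areainsights_v1.TypeFilter(
+            included_types=["restaurant"],
+            excluded_primary_types=["steak_house"],
+        )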
+ """ + + included_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + excluded_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + included_primary_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + excluded_primary_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class RatingFilter(proto.Message): + r"""Average user rating filters. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_rating (float): + Optional. Restricts results to places whose average user + rating is greater than or equal to min_rating. Values must + be between 1.0 and 5.0. + + This field is a member of `oneof`_ ``_min_rating``. + max_rating (float): + Optional. Restricts results to places whose average user + rating is strictly less than or equal to max_rating. Values + must be between 1.0 and 5.0. + + This field is a member of `oneof`_ ``_max_rating``. + """ + + min_rating: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + max_rating: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-areainsights/mypy.ini b/packages/google-maps-areainsights/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-maps-areainsights/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-maps-areainsights/noxfile.py b/packages/google-maps-areainsights/noxfile.py new file mode 100644 index 000000000000..aeee7851401a --- /dev/null +++ b/packages/google-maps-areainsights/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
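+#
+# Illustrative usage sketch (an assumption, not generated content): with
+# nox installed, the sessions defined below can be run locally, for example:
+#
+#     python -m pip install nox
+#     nox -s lint
+#     nox -s unit
+#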
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
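+    # (Each match is the package name that appears immediately before
+    # "==" on a pinned requirement line.)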
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py new file mode 100644 index 000000000000..880f4b4385cc --- /dev/null +++ b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ComputeInsights +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-areainsights + + +# [START areainsights_v1_generated_AreaInsights_ComputeInsights_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import areainsights_v1 + + +async def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsAsyncClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = await client.compute_insights(request=request) + + # Handle the response + print(response) + +# [END areainsights_v1_generated_AreaInsights_ComputeInsights_async] diff --git a/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py new file mode 100644 index 000000000000..cbf41464d3b3 --- /dev/null +++ b/packages/google-maps-areainsights/samples/generated_samples/areainsights_v1_generated_area_insights_compute_insights_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ComputeInsights +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-areainsights + + +# [START areainsights_v1_generated_AreaInsights_ComputeInsights_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import areainsights_v1 + + +def sample_compute_insights(): + # Create a client + client = areainsights_v1.AreaInsightsClient() + + # Initialize request argument(s) + request = areainsights_v1.ComputeInsightsRequest( + insights=['INSIGHT_PLACES'], + ) + + # Make the request + response = client.compute_insights(request=request) + + # Handle the response + print(response) + +# [END areainsights_v1_generated_AreaInsights_ComputeInsights_sync] diff --git a/packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json b/packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json new file mode 100644 index 000000000000..508e4607c0f8 --- /dev/null +++ b/packages/google-maps-areainsights/samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json @@ -0,0 +1,168 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.maps.areainsights.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-maps-areainsights", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.areainsights_v1.AreaInsightsAsyncClient", + "shortName": "AreaInsightsAsyncClient" + }, + "fullName": "google.maps.areainsights_v1.AreaInsightsAsyncClient.compute_insights", + "method": { + "fullName": "google.maps.areainsights.v1.AreaInsights.ComputeInsights", + "service": { + "fullName": "google.maps.areainsights.v1.AreaInsights", + "shortName": "AreaInsights" + }, + "shortName": "ComputeInsights" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.areainsights_v1.types.ComputeInsightsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.areainsights_v1.types.ComputeInsightsResponse", + "shortName": "compute_insights" + }, + "description": "Sample for ComputeInsights", + "file": "areainsights_v1_generated_area_insights_compute_insights_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "areainsights_v1_generated_AreaInsights_ComputeInsights_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "areainsights_v1_generated_area_insights_compute_insights_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.areainsights_v1.AreaInsightsClient", + "shortName": "AreaInsightsClient" + }, + "fullName": "google.maps.areainsights_v1.AreaInsightsClient.compute_insights", + "method": { + "fullName": "google.maps.areainsights.v1.AreaInsights.ComputeInsights", + "service": { + "fullName": "google.maps.areainsights.v1.AreaInsights", + "shortName": "AreaInsights" + }, + "shortName": "ComputeInsights" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.areainsights_v1.types.ComputeInsightsRequest" 
+ }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.areainsights_v1.types.ComputeInsightsResponse", + "shortName": "compute_insights" + }, + "description": "Sample for ComputeInsights", + "file": "areainsights_v1_generated_area_insights_compute_insights_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "areainsights_v1_generated_AreaInsights_ComputeInsights_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "areainsights_v1_generated_area_insights_compute_insights_sync.py" + } + ] +} diff --git a/packages/google-maps-areainsights/scripts/decrypt-secrets.sh b/packages/google-maps-areainsights/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-maps-areainsights/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py b/packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py new file mode 100644 index 000000000000..2b35b82f7b4e --- /dev/null +++ b/packages/google-maps-areainsights/scripts/fixup_areainsights_v1_keywords.py @@ -0,0 +1,176 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class areainsightsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'compute_insights': ('insights', 'filter', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=areainsightsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the areainsights client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-maps-areainsights/setup.py b/packages/google-maps-areainsights/setup.py new file mode 100644 index 000000000000..70e743f355eb --- /dev/null +++ b/packages/google-maps-areainsights/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-maps-areainsights" + + +description = "Google Maps Areainsights API client library" + +version = None + +with open( + os.path.join(package_root, "google/maps/areainsights/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-maps-areainsights/testing/.gitignore b/packages/google-maps-areainsights/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-maps-areainsights/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-maps-areainsights/testing/constraints-3.10.txt b/packages/google-maps-areainsights/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.11.txt b/packages/google-maps-areainsights/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.12.txt b/packages/google-maps-areainsights/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.7.txt b/packages/google-maps-areainsights/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-maps-areainsights/testing/constraints-3.8.txt b/packages/google-maps-areainsights/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/testing/constraints-3.9.txt b/packages/google-maps-areainsights/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-areainsights/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-areainsights/tests/__init__.py b/packages/google-maps-areainsights/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-maps-areainsights/tests/unit/__init__.py b/packages/google-maps-areainsights/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/tests/unit/gapic/__init__.py b/packages/google-maps-areainsights/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py new file mode 100644 index 000000000000..f82db5638443 --- /dev/null +++ b/packages/google-maps-areainsights/tests/unit/gapic/areainsights_v1/test_area_insights.py @@ -0,0 +1,2315 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.type import latlng_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.maps.areainsights_v1.services.area_insights import ( + AreaInsightsAsyncClient, + AreaInsightsClient, + transports, +) +from google.maps.areainsights_v1.types import area_insights_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AreaInsightsClient._get_default_mtls_endpoint(None) is None + assert ( + AreaInsightsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + AreaInsightsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AreaInsightsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AreaInsightsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert AreaInsightsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert AreaInsightsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AreaInsightsClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AreaInsightsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + AreaInsightsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AreaInsightsClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AreaInsightsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AreaInsightsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AreaInsightsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AreaInsightsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AreaInsightsClient._get_client_cert_source(None, False) is None + assert ( + AreaInsightsClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + AreaInsightsClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + 
AreaInsightsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AreaInsightsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AreaInsightsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsClient), +) +@mock.patch.object( + AreaInsightsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + default_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AreaInsightsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AreaInsightsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, default_universe, "always") + == AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AreaInsightsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AreaInsightsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AreaInsightsClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AreaInsightsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AreaInsightsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AreaInsightsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AreaInsightsClient._get_universe_domain(None, None) + == AreaInsightsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AreaInsightsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc"), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+        channel = grpc.secure_channel(
+            "http://localhost/", grpc.local_channel_credentials()
+        )
+        client = client_class(transport=transport_class(channel=channel))
+        assert client._validate_universe_domain() == True
+
+        # Test the case where credentials do not exist: e.g. a transport is provided
+        # with no credentials. Validation should still succeed because there is no
+        # mismatch with non-existent credentials.
+        channel = grpc.secure_channel(
+            "http://localhost/", grpc.local_channel_credentials()
+        )
+        transport = transport_class(channel=channel)
+        transport._credentials = None
+        client = client_class(transport=transport)
+        assert client._validate_universe_domain() == True
+
+    # TODO: This is needed to cater for older versions of google-auth
+    # Make this test unconditional once the minimum supported version of
+    # google-auth becomes 2.23.0 or higher.
+    google_auth_major, google_auth_minor = [
+        int(part) for part in google.auth.__version__.split(".")[0:2]
+    ]
+    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
+        credentials = ga_credentials.AnonymousCredentials()
+        credentials._universe_domain = "foo.com"
+        # Test the case when there is a universe mismatch from the credentials.
+        client = client_class(transport=transport_class(credentials=credentials))
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert (
+            str(excinfo.value)
+            == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+        )
+
+    # Test the case when there is a universe mismatch from the client.
+    #
+    # TODO: Make this test unconditional once the minimum supported version of
+    # google-api-core becomes 2.15.0 or higher.
+    api_core_major, api_core_minor = [
+        int(part) for part in api_core_version.__version__.split(".")[0:2]
+    ]
+    if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
+        client = client_class(
+            client_options={"universe_domain": "bar.com"},
+            transport=transport_class(
+                credentials=ga_credentials.AnonymousCredentials(),
+            ),
+        )
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert (
+            str(excinfo.value)
+            == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+        )
+
+    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
+    with pytest.raises(ValueError):
+        client._compare_universes("foo.bar", None)
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (AreaInsightsClient, "grpc"),
+        (AreaInsightsAsyncClient, "grpc_asyncio"),
+        (AreaInsightsClient, "rest"),
+    ],
+)
+def test_area_insights_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "areainsights.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://areainsights.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.AreaInsightsGrpcTransport, "grpc"),
+        (transports.AreaInsightsGrpcAsyncIOTransport, "grpc_asyncio"),
+        (transports.AreaInsightsRestTransport, "rest"),
+    ],
+)
+def test_area_insights_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (AreaInsightsClient, "grpc"),
+        (AreaInsightsAsyncClient, "grpc_asyncio"),
+        (AreaInsightsClient, "rest"),
+    ],
+)
+def test_area_insights_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "areainsights.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://areainsights.googleapis.com"
+        )
+
+
+def test_area_insights_client_get_transport_class():
+    transport = AreaInsightsClient.get_transport_class()
+    available_transports = [
+        transports.AreaInsightsGrpcTransport,
+        transports.AreaInsightsRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = AreaInsightsClient.get_transport_class("grpc")
+    assert transport == transports.AreaInsightsGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc"),
+        (
+            AreaInsightsAsyncClient,
+
transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest"), + ], +) +@mock.patch.object( + AreaInsightsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsClient), +) +@mock.patch.object( + AreaInsightsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsAsyncClient), +) +def test_area_insights_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AreaInsightsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AreaInsightsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+        )
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_endpoint is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc", "true"),
+        (
+            AreaInsightsAsyncClient,
+            transports.AreaInsightsGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc", "false"),
+        (
+            AreaInsightsAsyncClient,
+            transports.AreaInsightsGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest", "true"),
+        (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    AreaInsightsClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(AreaInsightsClient),
+)
+@mock.patch.object(
+    AreaInsightsAsyncClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(AreaInsightsAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_area_insights_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [AreaInsightsClient, AreaInsightsAsyncClient]) +@mock.patch.object( + AreaInsightsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AreaInsightsClient) +) +@mock.patch.object( + AreaInsightsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AreaInsightsAsyncClient), +) +def test_area_insights_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [AreaInsightsClient, AreaInsightsAsyncClient]) +@mock.patch.object( + AreaInsightsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsClient), +) +@mock.patch.object( + AreaInsightsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AreaInsightsAsyncClient), +) +def test_area_insights_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AreaInsightsClient._DEFAULT_UNIVERSE + default_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AreaInsightsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport, "grpc"), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest"), + ], +) +def test_area_insights_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AreaInsightsClient, + transports.AreaInsightsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AreaInsightsClient, transports.AreaInsightsRestTransport, "rest", None), + ], +) +def test_area_insights_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_area_insights_client_client_options_from_dict(): + with mock.patch( + "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AreaInsightsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AreaInsightsClient, + transports.AreaInsightsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AreaInsightsAsyncClient, + transports.AreaInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_area_insights_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "areainsights.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=None,
+            default_host="areainsights.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        area_insights_service.ComputeInsightsRequest,
+        dict,
+    ],
+)
+def test_compute_insights(request_type, transport: str = "grpc"):
+    client = AreaInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.compute_insights), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = area_insights_service.ComputeInsightsResponse(
+            count=553,
+        )
+        response = client.compute_insights(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = area_insights_service.ComputeInsightsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, area_insights_service.ComputeInsightsResponse)
+    assert response.count == 553
+
+
+def test_compute_insights_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AreaInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.compute_insights), "__call__") as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client.compute_insights()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == area_insights_service.ComputeInsightsRequest()
+
+
+def test_compute_insights_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = AreaInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = area_insights_service.ComputeInsightsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.compute_insights(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == area_insights_service.ComputeInsightsRequest() + + +def test_compute_insights_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.compute_insights in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.compute_insights + ] = mock_rpc + request = {} + client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.compute_insights(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_compute_insights_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + area_insights_service.ComputeInsightsResponse( + count=553, + ) + ) + response = await client.compute_insights() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == area_insights_service.ComputeInsightsRequest() + + +@pytest.mark.asyncio +async def test_compute_insights_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.compute_insights + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.compute_insights + ] = mock_rpc + + request = {} + await client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.compute_insights(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_compute_insights_async( + transport: str = "grpc_asyncio", + request_type=area_insights_service.ComputeInsightsRequest, +): + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.compute_insights), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + area_insights_service.ComputeInsightsResponse( + count=553, + ) + ) + response = await client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = area_insights_service.ComputeInsightsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, area_insights_service.ComputeInsightsResponse) + assert response.count == 553 + + +@pytest.mark.asyncio +async def test_compute_insights_async_from_dict(): + await test_compute_insights_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + area_insights_service.ComputeInsightsRequest, + dict, + ], +) +def test_compute_insights_rest(request_type): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = area_insights_service.ComputeInsightsResponse( + count=553, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = area_insights_service.ComputeInsightsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.compute_insights(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, area_insights_service.ComputeInsightsResponse) + assert response.count == 553 + + +def test_compute_insights_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.compute_insights in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.compute_insights + ] = mock_rpc + + request = {} + client.compute_insights(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.compute_insights(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_compute_insights_rest_required_fields( + request_type=area_insights_service.ComputeInsightsRequest, +): + transport_class = transports.AreaInsightsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_insights._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_insights._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = area_insights_service.ComputeInsightsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = area_insights_service.ComputeInsightsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.compute_insights(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_compute_insights_rest_unset_required_fields(): + transport = transports.AreaInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.compute_insights._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "insights", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_compute_insights_rest_interceptors(null_interceptor): + transport = transports.AreaInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AreaInsightsRestInterceptor(), + ) + client = AreaInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AreaInsightsRestInterceptor, "post_compute_insights" + ) as post, mock.patch.object( + transports.AreaInsightsRestInterceptor, "pre_compute_insights" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = area_insights_service.ComputeInsightsRequest.pb( + area_insights_service.ComputeInsightsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + area_insights_service.ComputeInsightsResponse.to_json( + area_insights_service.ComputeInsightsResponse() + ) + ) + + request = area_insights_service.ComputeInsightsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = area_insights_service.ComputeInsightsResponse() + + client.compute_insights( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_compute_insights_rest_bad_request( + transport: str = "rest", request_type=area_insights_service.ComputeInsightsRequest +): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.compute_insights(request) + + +def test_compute_insights_rest_error(): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AreaInsightsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AreaInsightsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AreaInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AreaInsightsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AreaInsightsGrpcTransport, + transports.AreaInsightsGrpcAsyncIOTransport, + transports.AreaInsightsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "rest",
+    ],
+)
+def test_transport_kind(transport_name):
+    transport = AreaInsightsClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = AreaInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.AreaInsightsGrpcTransport,
+    )
+
+
+def test_area_insights_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.AreaInsightsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json",
+        )
+
+
+def test_area_insights_base_transport():
+    # Instantiate the base transport.
+    with mock.patch(
+        "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsTransport.__init__"
+    ) as Transport:
+        Transport.return_value = None
+        transport = transports.AreaInsightsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = ("compute_insights",)
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        "kind",
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_area_insights_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.AreaInsightsTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+def test_area_insights_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+        "google.maps.areainsights_v1.services.area_insights.transports.AreaInsightsTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.AreaInsightsTransport()
+        adc.assert_called_once()
+
+
+def test_area_insights_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        AreaInsightsClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.AreaInsightsGrpcTransport,
+        transports.AreaInsightsGrpcAsyncIOTransport,
+    ],
+)
+def test_area_insights_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.AreaInsightsGrpcTransport,
+        transports.AreaInsightsGrpcAsyncIOTransport,
+        transports.AreaInsightsRestTransport,
+    ],
+)
+def test_area_insights_transport_auth_gdch_credentials(transport_class):
+    host = "https://language.com"
+    api_audience_tests = [None, "https://language2.com"]
+    api_audience_expect = [host, "https://language2.com"]
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, "default", autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(
+                return_value=gdch_mock
+            )
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.AreaInsightsGrpcTransport, grpc_helpers),
+        (transports.AreaInsightsGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+def test_area_insights_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "areainsights.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="areainsights.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.AreaInsightsGrpcTransport, transports.AreaInsightsGrpcAsyncIOTransport],
+)
+def test_area_insights_grpc_transport_client_cert_source_for_mtls(transport_class):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_area_insights_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.AreaInsightsRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_area_insights_host_no_port(transport_name):
+    client = AreaInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="areainsights.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "areainsights.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://areainsights.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_area_insights_host_with_port(transport_name):
+    client = AreaInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="areainsights.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "areainsights.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://areainsights.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_area_insights_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = AreaInsightsClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = AreaInsightsClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.compute_insights._session
+    session2 = client2.transport.compute_insights._session
+    assert session1 != session2
+
+
+def test_area_insights_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AreaInsightsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+def test_area_insights_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AreaInsightsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.AreaInsightsGrpcTransport, transports.AreaInsightsGrpcAsyncIOTransport],
+)
+def test_area_insights_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.AreaInsightsGrpcTransport, transports.AreaInsightsGrpcAsyncIOTransport], +) +def test_area_insights_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_place_path(): + place_id = "squid" + expected = "places/{place_id}".format( + place_id=place_id, + ) + actual = AreaInsightsClient.place_path(place_id) + assert expected == actual + + +def test_parse_place_path(): + expected = { + "place_id": "clam", + } + path = AreaInsightsClient.place_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_place_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AreaInsightsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = AreaInsightsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AreaInsightsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = AreaInsightsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AreaInsightsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = AreaInsightsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AreaInsightsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = AreaInsightsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = AreaInsightsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AreaInsightsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = AreaInsightsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AreaInsightsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AreaInsightsTransport, "_prep_wrapped_messages" + ) as prep: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AreaInsightsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AreaInsightsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AreaInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AreaInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AreaInsightsClient, transports.AreaInsightsGrpcTransport), + (AreaInsightsAsyncClient, transports.AreaInsightsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/release-please-config.json b/release-please-config.json index b2f26908453f..c33b2c671761 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -3490,6 +3490,16 @@ } ], "release-type": "python" + }, + "packages/google-maps-areainsights": { + "extra-files": [ + "google/maps/areainsights_v1/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json", + "jsonpath": "$.clientLibrary.version" + } + ] } } } From 1f7011b9f31a26e3a415f31c2533758f453dcb3d Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Thu, 12 Sep 2024 13:58:41 -0700 Subject: [PATCH 061/108] chore: Update release-please config files (#13080) Update release-please config files --- .release-please-manifest.json | 1 + release-please-config.json | 25 +++++++++++++++---------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a23882e898d2..cc2385c7dd15 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -178,6 +178,7 @@ "packages/google-cloud-workstations": "0.5.8", "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", + "packages/google-maps-areainsights": "0.0.0", "packages/google-maps-fleetengine": "0.2.2", "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", diff --git a/release-please-config.json b/release-please-config.json index c33b2c671761..ca70cf6baec5 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -3197,6 +3197,21 @@ ], "release-type": "python" }, + "packages/google-maps-areainsights": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-maps-areainsights", + "extra-files": [ + "google/maps/areainsights/gapic_version.py", + "google/maps/areainsights_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-maps-fleetengine": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -3490,16 +3505,6 @@ } ], "release-type": "python" - }, - 
"packages/google-maps-areainsights": { - "extra-files": [ - "google/maps/areainsights_v1/gapic_version.py", - { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.maps.areainsights.v1.json", - "jsonpath": "$.clientLibrary.version" - } - ] } } } From 77b64f5739e1599d0d4f6860fc668e1e521168f4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:18:51 -0700 Subject: [PATCH 062/108] chore: release main (#13082) :robot: I have created a release *beep* *boop* ---
google-maps-areainsights: 0.1.0

## 0.1.0 (2024-09-12)

### Features

* add initial files for google.maps.areainsights.v1 ([#13078](https://github.com/googleapis/google-cloud-python/issues/13078)) ([ae58345](https://github.com/googleapis/google-cloud-python/commit/ae583456fd0a2f2e46ce8759918ebe3fc19a0fe8))
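For consumers of this first release, usage follows the same generated-client pattern as the other packages in this repository. The sketch below is illustrative only: the client class, transports, and the `compute_insights` RPC come from the generated unit tests earlier in this patch, while `ComputeInsightsRequest` stands in for the request type, whose fields are not shown here.

```python
# Illustrative sketch for the new google-maps-areainsights 0.1.0 package
# (pip install google-maps-areainsights). The client defaults to the
# areainsights.googleapis.com endpoint; "grpc", "grpc_asyncio", and "rest"
# transports are generated (see the host/transport tests above).
from google.maps import areainsights_v1

client = areainsights_v1.AreaInsightsClient()

# compute_insights is the RPC exercised by the generated tests above. An empty
# request is used purely as a placeholder; populate it according to the
# google.maps.areainsights.v1 protos.
response = client.compute_insights(request=areainsights_v1.ComputeInsightsRequest())
print(response)
```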
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- packages/google-maps-areainsights/CHANGELOG.md | 11 ++++++++++- .../google/maps/areainsights/gapic_version.py | 2 +- .../google/maps/areainsights_v1/gapic_version.py | 2 +- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index cc2385c7dd15..2a86228c483b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -178,7 +178,7 @@ "packages/google-cloud-workstations": "0.5.8", "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", - "packages/google-maps-areainsights": "0.0.0", + "packages/google-maps-areainsights": "0.1.0", "packages/google-maps-fleetengine": "0.2.2", "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", diff --git a/packages/google-maps-areainsights/CHANGELOG.md b/packages/google-maps-areainsights/CHANGELOG.md index 5ddad421e08f..2f417509fcba 100644 --- a/packages/google-maps-areainsights/CHANGELOG.md +++ b/packages/google-maps-areainsights/CHANGELOG.md @@ -1 +1,10 @@ -# Changelog \ No newline at end of file +# Changelog + +## 0.1.0 (2024-09-12) + + +### Features + +* add initial files for google.maps.areainsights.v1 ([#13078](https://github.com/googleapis/google-cloud-python/issues/13078)) ([ae58345](https://github.com/googleapis/google-cloud-python/commit/ae583456fd0a2f2e46ce8759918ebe3fc19a0fe8)) + +## Changelog diff --git a/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py index caeec5b9e887..20d1d778beb7 100644 --- a/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py +++ b/packages/google-maps-areainsights/google/maps/areainsights/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py index caeec5b9e887..20d1d778beb7 100644 --- a/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py +++ b/packages/google-maps-areainsights/google/maps/areainsights_v1/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} From 2402404a5ac48c8289a2dbc24fcc85a1eebe4224 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Sep 2024 17:53:35 -0700 Subject: [PATCH 063/108] feat: [google-cloud-dataproc] add resource reference for KMS keys and fix comments (#13072) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE feat: [google-cloud-dataproc] add resource reference for KMS keys and fix comments feat: [google-cloud-dataproc] Allow flink and trino job support for workflow templates API feat: [google-cloud-dataproc] Add unreachable output field for LIST workflow template API feat: [google-cloud-dataproc] Add unreachable output field for LIST batch templates API feat: [google-cloud-dataproc] Add kms key input for create cluster API feat: [google-cloud-dataproc] Add FLINK metric source for Dataproc Metric Source feat: [google-cloud-dataproc] Allow flink job support for jobs feat: [google-cloud-dataproc] Add unreachable output field for LIST jobs API END_COMMIT_OVERRIDE PiperOrigin-RevId: 674408200 Source-Link: https://github.com/googleapis/googleapis/commit/964f6c9ab536dd33a99bbba8d9d8071d0cf39f89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2979ec5cbcf2abae857f9a4cbe5cca09f040cb58 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiIyOTc5ZWM1Y2JjZjJhYmFlODU3ZjlhNGNiZTVjY2EwOWYwNDBjYjU4In0= BEGIN_NESTED_COMMIT feat: [google-cloud-dataproc] add support for new Dataproc features 1. Allow flink and trino job support for workflow templates API 2. Add unreachable output field for LIST workflow template API 4. Add unreachable output field for LIST batch templates API 5. Add kms key input for create cluster API 6. Add FLINK metric source for Dataproc Metric Source PiperOrigin-RevId: 673000575 Source-Link: https://github.com/googleapis/googleapis/commit/02f62c8e241a9f95f0d183785354e90e35388cbd Source-Link: https://github.com/googleapis/googleapis-gen/commit/7726f478ac19d91e914ed3ae546cf24453d000b5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiI3NzI2ZjQ3OGFjMTlkOTFlOTE0ZWQzYWU1NDZjZjI0NDUzZDAwMGI1In0= END_NESTED_COMMIT BEGIN_NESTED_COMMIT feat: [google-cloud-dataproc] add support for new Dataproc features 1. Allow flink job support for jobs 2. 
Add unreachable output field for LIST jobs API PiperOrigin-RevId: 672705294 Source-Link: https://github.com/googleapis/googleapis/commit/32bc03653260356351854429bd7e2dfbf670d352 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46e7728c9908d9793ebce1061b0d1c6c4bad925b Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiI0NmU3NzI4Yzk5MDhkOTc5M2ViY2UxMDYxYjBkMWM2YzRiYWQ5MjViIn0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/cloud/dataproc/__init__.py | 2 + .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/__init__.py | 2 + .../google/cloud/dataproc_v1/gapic_version.py | 2 +- .../cluster_controller/async_client.py | 11 +- .../services/cluster_controller/client.py | 33 +++- .../workflow_template_service/async_client.py | 4 + .../workflow_template_service/client.py | 24 +++ .../cloud/dataproc_v1/types/__init__.py | 2 + .../google/cloud/dataproc_v1/types/batches.py | 9 + .../cloud/dataproc_v1/types/clusters.py | 108 ++++++++--- .../google/cloud/dataproc_v1/types/jobs.py | 164 +++++++++++++---- .../google/cloud/dataproc_v1/types/shared.py | 9 +- .../dataproc_v1/types/workflow_templates.py | 78 ++++++++ ...pet_metadata_google.cloud.dataproc.v1.json | 2 +- .../dataproc_v1/test_batch_controller.py | 7 + .../dataproc_v1/test_cluster_controller.py | 93 +++++++--- .../gapic/dataproc_v1/test_job_controller.py | 16 ++ .../test_workflow_template_service.py | 168 ++++++++++++++---- 19 files changed, 598 insertions(+), 138 deletions(-) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py index 8c0be718b5bc..1c45dca78fda 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py @@ -135,6 +135,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -315,6 +316,7 @@ "CancelJobRequest", "DeleteJobRequest", "DriverSchedulingConfig", + "FlinkJob", "GetJobRequest", "HadoopJob", "HiveJob", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 0f412e925d59..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.11.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py index 49bcea5780a2..e89772784679 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py @@ -115,6 +115,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -271,6 +272,7 @@ "EnvironmentConfig", "ExecutionConfig", "FailureAction", + "FlinkJob", "GceClusterConfig", "GetAutoscalingPolicyRequest", "GetBatchRequest", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 0f412e925d59..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.11.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index b3c00033b6a1..72ad480491a2 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -74,6 +74,8 @@ class ClusterControllerAsyncClient: cluster_path = staticmethod(ClusterControllerClient.cluster_path) parse_cluster_path = staticmethod(ClusterControllerClient.parse_cluster_path) + crypto_key_path = staticmethod(ClusterControllerClient.crypto_key_path) + parse_crypto_key_path = staticmethod(ClusterControllerClient.parse_crypto_key_path) node_group_path = staticmethod(ClusterControllerClient.node_group_path) parse_node_group_path = staticmethod(ClusterControllerClient.parse_node_group_path) service_path = staticmethod(ClusterControllerClient.service_path) @@ -1236,10 +1238,11 @@ async def sample_list_clusters(): label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the + ``ERROR``, ``DELETING``, ``UPDATING``, ``STOPPING``, or + ``STOPPED``. ``ACTIVE`` contains the ``CREATING``, + ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` + contains the ``DELETING``, ``ERROR``, ``STOPPING``, and + ``STOPPED`` states. ``clusterName`` is the name of the cluster provided at creation time. Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. 
diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py index d46589e86a36..d0662bc0348c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -215,6 +215,30 @@ def parse_cluster_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def node_group_path( project: str, @@ -1686,10 +1710,11 @@ def sample_list_clusters(): label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the + ``ERROR``, ``DELETING``, ``UPDATING``, ``STOPPING``, or + ``STOPPED``. ``ACTIVE`` contains the ``CREATING``, + ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` + contains the ``DELETING``, ``ERROR``, ``STOPPING``, and + ``STOPPED`` states. ``clusterName`` is the name of the cluster provided at creation time. Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. 
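The `crypto_key_path` helper added above is a plain string formatter, so it can be exercised without any API call. A minimal sketch, assuming only the static helpers shown in this hunk and a google-cloud-dataproc build that includes them:

```python
# Minimal sketch of the new ClusterControllerClient.crypto_key_path helper
# (the same helper is added to WorkflowTemplateServiceClient further below).
# It only formats/parses the Cloud KMS resource name; no API call is made.
from google.cloud import dataproc_v1

key_name = dataproc_v1.ClusterControllerClient.crypto_key_path(
    project="my-project",        # placeholder values for illustration
    location="us-central1",
    key_ring="my-key-ring",
    crypto_key="my-key",
)
# -> "projects/my-project/locations/us-central1/keyRings/my-key-ring/cryptoKeys/my-key"

# parse_crypto_key_path reverses the formatting into its path segments.
assert dataproc_v1.ClusterControllerClient.parse_crypto_key_path(key_name) == {
    "project": "my-project",
    "location": "us-central1",
    "key_ring": "my-key-ring",
    "crypto_key": "my-key",
}

# A name in this form is what the new EncryptionConfig.kms_key field
# (see the clusters.py hunk below) expects for CMEK encryption of cluster
# data and job arguments.
```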
diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index 85222311c4d7..90bd5f500cbc 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -75,6 +75,10 @@ class WorkflowTemplateServiceAsyncClient: ) _DEFAULT_UNIVERSE = WorkflowTemplateServiceClient._DEFAULT_UNIVERSE + crypto_key_path = staticmethod(WorkflowTemplateServiceClient.crypto_key_path) + parse_crypto_key_path = staticmethod( + WorkflowTemplateServiceClient.parse_crypto_key_path + ) node_group_path = staticmethod(WorkflowTemplateServiceClient.node_group_path) parse_node_group_path = staticmethod( WorkflowTemplateServiceClient.parse_node_group_path diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py index 4da14dd2c32c..175bc494f421 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -194,6 +194,30 @@ def transport(self) -> WorkflowTemplateServiceTransport: """ return self._transport + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def node_group_path( project: str, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py index 535fc0e4fc92..2bf4fcd11209 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py @@ -84,6 +84,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -248,6 +249,7 @@ "CancelJobRequest", "DeleteJobRequest", "DriverSchedulingConfig", + "FlinkJob", "GetJobRequest", "HadoopJob", "HiveJob", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py index bff597bc91b6..2459180957df 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py @@ -183,6 +183,11 @@ class ListBatchesResponse(proto.Message): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. + unreachable (MutableSequence[str]): + Output only. List of Batches that could not + be included in the response. 
Attempting to get + one of these resources may indicate why it was + not included in the list response. """ @property @@ -198,6 +203,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteBatchRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py index b6e60e1765c0..169d350f046b 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -464,15 +464,50 @@ class EncryptionConfig(proto.Message): Attributes: gce_pd_kms_key_name (str): - Optional. The Cloud KMS key name to use for - PD disk encryption for all instances in the - cluster. + Optional. The Cloud KMS key resource name to use for + persistent disk encryption for all instances in the cluster. + See [Use CMEK with cluster data] + (https://cloud.google.com//dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_cluster_data) + for more information. + kms_key (str): + Optional. The Cloud KMS key resource name to use for cluster + persistent disk and job argument encryption. See [Use CMEK + with cluster data] + (https://cloud.google.com//dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_cluster_data) + for more information. + + When this key resource name is provided, the following job + arguments of the following job types submitted to the + cluster are encrypted using CMEK: + + - `FlinkJob + args `__ + - `HadoopJob + args `__ + - `SparkJob + args `__ + - `SparkRJob + args `__ + - `PySparkJob + args `__ + - `SparkSqlJob `__ + scriptVariables and queryList.queries + - `HiveJob `__ + scriptVariables and queryList.queries + - `PigJob `__ + scriptVariables and queryList.queries + - `PrestoJob `__ + scriptVariables and queryList.queries """ gce_pd_kms_key_name: str = proto.Field( proto.STRING, number=1, ) + kms_key: str = proto.Field( + proto.STRING, + number=2, + ) class GceClusterConfig(proto.Message): @@ -519,14 +554,25 @@ class GceClusterConfig(proto.Message): - ``projects/[project_id]/regions/[region]/subnetworks/sub0`` - ``sub0`` internal_ip_only (bool): - Optional. If true, all instances in the cluster will only - have internal IP addresses. By default, clusters are not - restricted to internal IP addresses, and will have ephemeral - external IP addresses assigned to each instance. This - ``internal_ip_only`` restriction can only be enabled for - subnetwork enabled networks, and all off-cluster - dependencies must be configured to be accessible without - external IP addresses. + Optional. This setting applies to subnetwork-enabled + networks. It is set to ``true`` by default in clusters + created with image versions 2.2.x. + + When set to ``true``: + + - All cluster VMs have internal IP addresses. + - [Google Private Access] + (https://cloud.google.com/vpc/docs/private-google-access) + must be enabled to access Dataproc and other Google Cloud + APIs. + - Off-cluster dependencies must be configured to be + accessible without external IP addresses. + + When set to ``false``: + + - Cluster VMs are not restricted to internal IP addresses. + - Ephemeral external IP addresses are assigned to each + cluster VM. This field is a member of `oneof`_ ``_internal_ip_only``. 
private_ipv6_google_access (google.cloud.dataproc_v1.types.GceClusterConfig.PrivateIpv6GoogleAccess): @@ -560,9 +606,9 @@ class GceClusterConfig(proto.Message): - https://www.googleapis.com/auth/bigtable.data - https://www.googleapis.com/auth/devstorage.full_control tags (MutableSequence[str]): - The Compute Engine tags to add to all instances (see + The Compute Engine network tags to add to all instances (see `Tagging - instances `__). + instances `__). metadata (MutableMapping[str, str]): Optional. The Compute Engine metadata entries to add to all instances (see `Project and instance @@ -1156,15 +1202,15 @@ class AcceleratorConfig(proto.Message): Examples: - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``nvidia-tesla-k80`` + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-t4`` + - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-t4`` + - ``nvidia-tesla-t4`` **Auto Zone Exception**: If you are using the Dataproc `Auto Zone Placement `__ feature, you must use the short name of the accelerator type - resource, for example, ``nvidia-tesla-k80``. + resource, for example, ``nvidia-tesla-t4``. accelerator_count (int): The number of the accelerator cards of this type exposed to this instance. @@ -1501,8 +1547,8 @@ class KerberosConfig(proto.Message): encrypted file containing the root principal password. kms_key_uri (str): - Optional. The uri of the KMS key used to - encrypt various sensitive files. + Optional. The URI of the KMS key used to + encrypt sensitive files. keystore_uri (str): Optional. The Cloud Storage URI of the keystore file used for SSL encryption. If not @@ -1649,7 +1695,7 @@ class SoftwareConfig(proto.Message): image_version (str): Optional. The version of software inside the cluster. It must be one of the supported `Dataproc - Versions `__, + Versions `__, such as "1.2" (including a subminor version, such as "1.2.29"), or the `"preview" version `__. @@ -1834,6 +1880,8 @@ class MetricSource(proto.Enum): Hiveserver2 metric source. HIVEMETASTORE (7): hivemetastore metric source + FLINK (8): + flink metric source """ METRIC_SOURCE_UNSPECIFIED = 0 MONITORING_AGENT_DEFAULTS = 1 @@ -1843,6 +1891,7 @@ class MetricSource(proto.Enum): SPARK_HISTORY_SERVER = 5 HIVESERVER2 = 6 HIVEMETASTORE = 7 + FLINK = 8 class Metric(proto.Message): r"""A Dataproc custom metric. @@ -2312,11 +2361,12 @@ class ListClustersRequest(proto.Message): or ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, - ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, or - ``UPDATING``. ``ACTIVE`` contains the ``CREATING``, - ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` contains - the ``DELETING`` and ``ERROR`` states. ``clusterName`` is - the name of the cluster provided at creation time. Only the + ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, + ``UPDATING``, ``STOPPING``, or ``STOPPED``. ``ACTIVE`` + contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` + states. ``INACTIVE`` contains the ``DELETING``, ``ERROR``, + ``STOPPING``, and ``STOPPED`` states. ``clusterName`` is the + name of the cluster provided at creation time. Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. 
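Since the filter grammar above now documents the ``STOPPING`` and ``STOPPED`` states, a short example of passing such a filter through the generated client may help. This is a sketch that assumes the flattened ``project_id``/``region``/``filter`` parameters and the regional-endpoint convention of the existing Dataproc client, which are not part of this hunk; project, region, and label values are placeholders.

```python
# Sketch: listing clusters with the expanded status.state filter values
# documented above. Endpoint, project, and region are placeholders.
from google.cloud import dataproc_v1

client = dataproc_v1.ClusterControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

# Only the logical AND operator is supported; space-separated items are
# treated as having an implicit AND.
cluster_filter = "status.state = STOPPED AND labels.env = staging"

for cluster in client.list_clusters(
    project_id="my-project", region="us-central1", filter=cluster_filter
):
    print(cluster.cluster_name, cluster.status.state)
```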
@@ -2393,10 +2443,10 @@ class DiagnoseClusterRequest(proto.Message): cluster_name (str): Required. The cluster name. tarball_gcs_dir (str): - Optional. The output Cloud Storage directory - for the diagnostic tarball. If not specified, a - task-specific directory in the cluster's staging - bucket will be used. + Optional. (Optional) The output Cloud Storage + directory for the diagnostic tarball. If not + specified, a task-specific directory in the + cluster's staging bucket will be used. tarball_access (google.cloud.dataproc_v1.types.DiagnoseClusterRequest.TarballAccess): Optional. (Optional) The access type to the diagnostic tarball. If not specified, falls back diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py index b0e094f18985..2f9bcc9dba29 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py @@ -35,6 +35,7 @@ "SparkRJob", "PrestoJob", "TrinoJob", + "FlinkJob", "JobPlacement", "JobStatus", "JobReference", @@ -60,7 +61,7 @@ class LoggingConfig(proto.Message): Attributes: driver_log_levels (MutableMapping[str, google.cloud.dataproc_v1.types.LoggingConfig.Level]): The per-package log levels for the driver. - This may include "root" package name to + This can include "root" package name to configure rootLogger. Examples: - 'com.google = FATAL' @@ -144,7 +145,7 @@ class HadoopJob(proto.Message): args (MutableSequence[str]): Optional. The arguments to pass to the driver. Do not include arguments, such as ``-libjars`` or ``-Dfoo=bar``, - that can be set as job properties, since a collision may + that can be set as job properties, since a collision might occur that causes an incorrect job submission. jar_file_uris (MutableSequence[str]): Optional. Jar file URIs to add to the @@ -163,7 +164,7 @@ class HadoopJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set - by the Dataproc API may be overwritten. Can include + by the Dataproc API might be overwritten. Can include properties set in ``/etc/hadoop/conf/*-site`` and classes in user code. logging_config (google.cloud.dataproc_v1.types.LoggingConfig): @@ -229,7 +230,7 @@ class SparkJob(proto.Message): main_class (str): The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. + specified in SparkJob.jar_file_uris. This field is a member of `oneof`_ ``driver``. args (MutableSequence[str]): @@ -253,8 +254,9 @@ class SparkJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Spark. Properties that - conflict with values set by the Dataproc API may - be overwritten. Can include properties set in + conflict with values set by the Dataproc API + might be overwritten. Can include properties set + in /etc/spark/conf/spark-defaults.conf and classes in user code. logging_config (google.cloud.dataproc_v1.types.LoggingConfig): @@ -335,7 +337,7 @@ class PySparkJob(proto.Message): Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc - API may be overwritten. Can include properties + API might be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. 
@@ -441,8 +443,8 @@ class HiveJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, + the Dataproc API might be overwritten. Can include + properties set in ``/etc/hadoop/conf/*-site.xml``, /etc/hive/conf/hive-site.xml, and classes in user code. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to add to @@ -511,7 +513,7 @@ class SparkSqlJob(proto.Message): Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the - Dataproc API may be overwritten. + Dataproc API might be overwritten. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. @@ -583,8 +585,8 @@ class PigJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, + the Dataproc API might be overwritten. Can include + properties set in ``/etc/hadoop/conf/*-site.xml``, /etc/pig/conf/pig.properties, and classes in user code. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to add to @@ -659,7 +661,7 @@ class SparkRJob(proto.Message): Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc - API may be overwritten. Can include properties + API might be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. @@ -856,6 +858,86 @@ class TrinoJob(proto.Message): ) +class FlinkJob(proto.Message): + r"""A Dataproc job for running Apache Flink applications on YARN. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + main_jar_file_uri (str): + The HCFS URI of the jar file that contains + the main class. + + This field is a member of `oneof`_ ``driver``. + main_class (str): + The name of the driver's main class. The jar file that + contains the class must be in the default CLASSPATH or + specified in + [jarFileUris][google.cloud.dataproc.v1.FlinkJob.jar_file_uris]. + + This field is a member of `oneof`_ ``driver``. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments, such as ``--conf``, that can be set as + job properties, since a collision might occur that causes an + incorrect job submission. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the CLASSPATHs of the Flink driver and tasks. + savepoint_uri (str): + Optional. HCFS URI of the savepoint, which + contains the last saved progress for starting + the current job. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to values, used to + configure Flink. Properties that conflict with values set by + the Dataproc API might be overwritten. Can include + properties set in ``/etc/flink/conf/flink-defaults.conf`` + and classes in user code. 
+ logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + main_jar_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="driver", + ) + main_class: str = proto.Field( + proto.STRING, + number=2, + oneof="driver", + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + savepoint_uri: str = proto.Field( + proto.STRING, + number=9, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="LoggingConfig", + ) + + class JobPlacement(proto.Message): r"""Dataproc job config. @@ -894,9 +976,8 @@ class JobStatus(proto.Message): Output only. A state message specifying the overall job state. details (str): - Optional. Output only. Job state details, - such as an error description if the state is - ERROR. + Optional. Output only. Job state details, such as an error + description if the state is ``ERROR``. state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when this state was entered. @@ -963,14 +1044,14 @@ class Substate(proto.Enum): Applies to RUNNING state. QUEUED (2): The Job has been received and is awaiting - execution (it may be waiting for a condition to - be met). See the "details" field for the reason - for the delay. + execution (it might be waiting for a condition + to be met). See the "details" field for the + reason for the delay. Applies to RUNNING state. STALE_STATUS (3): The agent-reported status is out of date, - which may be caused by a loss of communication + which can be caused by a loss of communication between the agent and Dataproc. If the agent does not send a timely update, the job will fail. @@ -1165,10 +1246,14 @@ class Job(proto.Message): trino_job (google.cloud.dataproc_v1.types.TrinoJob): Optional. Job is a Trino job. + This field is a member of `oneof`_ ``type_job``. + flink_job (google.cloud.dataproc_v1.types.FlinkJob): + Optional. Job is a Flink job. + This field is a member of `oneof`_ ``type_job``. status (google.cloud.dataproc_v1.types.JobStatus): Output only. The job status. Additional application-specific - status information may be contained in the type_job and + status information might be contained in the type_job and yarn_applications fields. status_history (MutableSequence[google.cloud.dataproc_v1.types.JobStatus]): Output only. The previous job status. @@ -1177,20 +1262,20 @@ class Job(proto.Message): this job. **Beta** Feature: This report is available for testing - purposes only. It may be changed before final release. + purposes only. It might be changed before final release. driver_output_resource_uri (str): Output only. A URI pointing to the location of the stdout of the job's driver program. driver_control_files_uri (str): Output only. If present, the location of miscellaneous - control files which may be used as part of job setup and - handling. If not present, control files may be placed in the - same location as ``driver_output_uri``. + control files which can be used as part of job setup and + handling. If not present, control files might be placed in + the same location as ``driver_output_uri``. labels (MutableMapping[str, str]): Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. 
- Label **values** may be empty, but, if present, must contain + Label **values** can be empty, but, if present, must contain 1 to 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with a job. @@ -1199,7 +1284,8 @@ class Job(proto.Message): job_uuid (str): Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a - user-settable reference.job_id that may be reused over time. + user-settable reference.job_id that might be reused over + time. done (bool): Output only. Indicates whether the job is completed. If the value is ``false``, the job is still in progress. If @@ -1273,6 +1359,12 @@ class Job(proto.Message): oneof="type_job", message="TrinoJob", ) + flink_job: "FlinkJob" = proto.Field( + proto.MESSAGE, + number=29, + oneof="type_job", + message="FlinkJob", + ) status: "JobStatus" = proto.Field( proto.MESSAGE, number=8, @@ -1348,12 +1440,12 @@ class JobScheduling(proto.Message): Attributes: max_failures_per_hour (int): - Optional. Maximum number of times per hour a driver may be + Optional. Maximum number of times per hour a driver can be restarted as a result of driver exiting with non-zero code before job is reported failed. - A job may be reported as thrashing if the driver exits with - a non-zero code four times within a 10-minute window. + A job might be reported as thrashing if the driver exits + with a non-zero code four times within a 10-minute window. Maximum value is 10. @@ -1361,7 +1453,7 @@ class JobScheduling(proto.Message): Dataproc [workflow templates] (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template). max_failures_total (int): - Optional. Maximum total number of times a driver may be + Optional. Maximum total number of times a driver can be restarted as a result of the driver exiting with a non-zero code. After the maximum number is reached, the job will be reported as failed. @@ -1644,6 +1736,12 @@ class ListJobsResponse(proto.Message): are more results to fetch. To fetch additional results, provide this value as the ``page_token`` in a subsequent ListJobsRequest. + unreachable (MutableSequence[str]): + Output only. List of jobs with + [kms_key][google.cloud.dataproc.v1.EncryptionConfig.kms_key]-encrypted + parameters that could not be decrypted. A response to a + ``jobs.get`` request may indicate the reason for the + decryption failure for a specific job. """ @property @@ -1659,6 +1757,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CancelJobRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py index ed37c1b8a565..656453c5fe33 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py @@ -54,10 +54,11 @@ class Component(proto.Enum): Unspecified component. Specifying this will cause Cluster creation to fail. ANACONDA (5): - The Anaconda python distribution. The Anaconda component is - not supported in the Dataproc [2.0 image] - (/https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-release-2.0). - The 2.0 image is pre-installed with Miniconda. 
+ The Anaconda component is no longer supported or applicable + to [supported Dataproc on Compute Engine image versions] + (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-version-clusters#supported-dataproc-image-versions). + It cannot be activated on clusters created with supported + Dataproc on Compute Engine image versions. DOCKER (13): Docker DRUID (9): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py index 3526627e14b0..5022f3e353a7 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py @@ -124,8 +124,51 @@ class WorkflowTemplate(proto.Message): `managed cluster `__, the cluster is deleted. + encryption_config (google.cloud.dataproc_v1.types.WorkflowTemplate.EncryptionConfig): + Optional. Encryption settings for encrypting + workflow template job arguments. """ + class EncryptionConfig(proto.Message): + r"""Encryption settings for encrypting workflow template job + arguments. + + Attributes: + kms_key (str): + Optional. The Cloud KMS key name to use for encrypting + workflow template job arguments. + + When this this key is provided, the following workflow + template [job arguments] + (https://cloud.google.com/dataproc/docs/concepts/workflows/use-workflows#adding_jobs_to_a_template), + if present, are `CMEK + encrypted `__: + + - `FlinkJob + args `__ + - `HadoopJob + args `__ + - `SparkJob + args `__ + - `SparkRJob + args `__ + - `PySparkJob + args `__ + - `SparkSqlJob `__ + scriptVariables and queryList.queries + - `HiveJob `__ + scriptVariables and queryList.queries + - `PigJob `__ + scriptVariables and queryList.queries + - `PrestoJob `__ + scriptVariables and queryList.queries + """ + + kms_key: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( proto.STRING, number=2, @@ -173,6 +216,11 @@ class WorkflowTemplate(proto.Message): number=10, message=duration_pb2.Duration, ) + encryption_config: EncryptionConfig = proto.Field( + proto.MESSAGE, + number=11, + message=EncryptionConfig, + ) class WorkflowTemplatePlacement(proto.Message): @@ -346,6 +394,14 @@ class OrderedJob(proto.Message): presto_job (google.cloud.dataproc_v1.types.PrestoJob): Optional. Job is a Presto job. + This field is a member of `oneof`_ ``job_type``. + trino_job (google.cloud.dataproc_v1.types.TrinoJob): + Optional. Job is a Trino job. + + This field is a member of `oneof`_ ``job_type``. + flink_job (google.cloud.dataproc_v1.types.FlinkJob): + Optional. Job is a Flink job. + This field is a member of `oneof`_ ``job_type``. labels (MutableMapping[str, str]): Optional. The labels to associate with this job. @@ -419,6 +475,18 @@ class OrderedJob(proto.Message): oneof="job_type", message=gcd_jobs.PrestoJob, ) + trino_job: gcd_jobs.TrinoJob = proto.Field( + proto.MESSAGE, + number=13, + oneof="job_type", + message=gcd_jobs.TrinoJob, + ) + flink_job: gcd_jobs.FlinkJob = proto.Field( + proto.MESSAGE, + number=14, + oneof="job_type", + message=gcd_jobs.FlinkJob, + ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -1095,6 +1163,12 @@ class ListWorkflowTemplatesResponse(proto.Message): are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListWorkflowTemplatesRequest. + unreachable (MutableSequence[str]): + Output only. 
List of workflow templates that + could not be included in the response. + Attempting to get one of these resources may + indicate why it was not included in the list + response. """ @property @@ -1110,6 +1184,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteWorkflowTemplateRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c1b4b338fe39..c5f4e003db04 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index 4e92cbcfc4a1..b1b1cf5bcb8b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -1953,6 +1953,7 @@ def test_list_batches(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_batches(request) @@ -1965,6 +1966,7 @@ def test_list_batches(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBatchesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_batches_empty_call(): @@ -2070,6 +2072,7 @@ async def test_list_batches_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_batches() @@ -2139,6 +2142,7 @@ async def test_list_batches_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_batches(request) @@ -2152,6 +2156,7 @@ async def test_list_batches_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBatchesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -3648,6 +3653,7 @@ def test_list_batches_rest(request_type): # Designate an appropriate value for the returned response. return_value = batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -3664,6 +3670,7 @@ def test_list_batches_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBatchesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_batches_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 099921eb7e2b..6910a6fc2fa4 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -4476,7 +4476,10 @@ def test_create_cluster_rest(request_type): "execution_timeout": {"seconds": 751, "nanos": 543}, } ], - "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", + }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { "kerberos_config": { @@ -5082,7 +5085,10 @@ def test_update_cluster_rest(request_type): "execution_timeout": {"seconds": 751, "nanos": 543}, } ], - "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", + }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { "kerberos_config": { @@ -8167,11 +8173,42 @@ def test_parse_cluster_path(): assert expected == actual -def test_node_group_path(): +def test_crypto_key_path(): project = "cuttlefish" - region = "mussel" - cluster = "winkle" - node_group = "nautilus" + location = "mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = ClusterControllerClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = ClusterControllerClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ClusterControllerClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_node_group_path(): + project = "whelk" + region = "octopus" + cluster = "oyster" + node_group = "nudibranch" expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( project=project, region=region, @@ -8186,10 +8223,10 @@ def test_node_group_path(): def test_parse_node_group_path(): expected = { - "project": "scallop", - "region": "abalone", - "cluster": "squid", - "node_group": "clam", + "project": "cuttlefish", + "region": "mussel", + "cluster": "winkle", + "node_group": "nautilus", } path = ClusterControllerClient.node_group_path(**expected) @@ -8199,9 +8236,9 @@ def test_parse_node_group_path(): def test_service_path(): - project = "whelk" - location = "octopus" - service = "oyster" + project = "scallop" + location = "abalone" + service = "squid" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -8213,9 +8250,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "service": "mussel", + "project": "clam", + "location": "whelk", + "service": "octopus", } path = ClusterControllerClient.service_path(**expected) @@ -8225,7 +8262,7 @@ def test_parse_service_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8235,7 +8272,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = ClusterControllerClient.common_billing_account_path(**expected) @@ -8245,7 +8282,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8255,7 +8292,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = ClusterControllerClient.common_folder_path(**expected) @@ -8265,7 +8302,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8275,7 +8312,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = ClusterControllerClient.common_organization_path(**expected) @@ -8285,7 +8322,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8295,7 +8332,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = ClusterControllerClient.common_project_path(**expected) @@ -8305,8 +8342,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -8317,8 +8354,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + 
"project": "whelk", + "location": "octopus", } path = ClusterControllerClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index 1d12641b7a95..8b8450a8006f 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -2368,6 +2368,7 @@ def test_list_jobs(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_jobs(request) @@ -2380,6 +2381,7 @@ def test_list_jobs(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_jobs_empty_call(): @@ -2487,6 +2489,7 @@ async def test_list_jobs_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_jobs() @@ -2554,6 +2557,7 @@ async def test_list_jobs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_jobs(request) @@ -2567,6 +2571,7 @@ async def test_list_jobs_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -5008,6 +5013,7 @@ def test_list_jobs_rest(request_type): # Designate an appropriate value for the returned response. return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -5024,6 +5030,7 @@ def test_list_jobs_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListJobsPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_jobs_rest_use_cached_wrapped_rpc(): @@ -5477,6 +5484,15 @@ def test_update_job_rest(request_type): "properties": {}, "logging_config": {}, }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "status": { "state": 1, "details": "details_value", diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 67bc73b8e99b..c996bc73de7b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -3238,6 +3238,7 @@ def test_list_workflow_templates(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_workflow_templates(request) @@ -3250,6 +3251,7 @@ def test_list_workflow_templates(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowTemplatesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_workflow_templates_empty_call(): @@ -3362,6 +3364,7 @@ async def test_list_workflow_templates_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_workflow_templates() @@ -3434,6 +3437,7 @@ async def test_list_workflow_templates_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_workflow_templates(request) @@ -3447,6 +3451,7 @@ async def test_list_workflow_templates_async( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListWorkflowTemplatesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -4307,7 +4312,8 @@ def test_create_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -4448,6 +4454,24 @@ def test_create_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -4471,6 +4495,7 @@ def test_create_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5615,7 +5640,8 @@ def test_instantiate_inline_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -5756,6 +5782,24 @@ def test_instantiate_inline_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -5779,6 +5823,7 @@ def test_instantiate_inline_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6289,7 +6334,8 @@ def test_update_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -6430,6 +6476,24 @@ def test_update_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -6453,6 +6517,7 @@ def test_update_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6849,6 +6914,7 @@ def test_list_workflow_templates_rest(request_type): # Designate an appropriate value for the returned response. return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -6865,6 +6931,7 @@ def test_list_workflow_templates_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowTemplatesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_workflow_templates_rest_use_cached_wrapped_rpc(): @@ -8134,11 +8201,42 @@ def test_workflow_template_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_node_group_path(): +def test_crypto_key_path(): project = "squid" - region = "clam" - cluster = "whelk" - node_group = "octopus" + location = "clam" + key_ring = "whelk" + crypto_key = "octopus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = WorkflowTemplateServiceClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = WorkflowTemplateServiceClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = WorkflowTemplateServiceClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_node_group_path(): + project = "winkle" + region = "nautilus" + cluster = "scallop" + node_group = "abalone" expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( project=project, region=region, @@ -8153,10 +8251,10 @@ def test_node_group_path(): def test_parse_node_group_path(): expected = { - "project": "oyster", - "region": "nudibranch", - "cluster": "cuttlefish", - "node_group": "mussel", + "project": "squid", + "region": "clam", + "cluster": "whelk", + "node_group": "octopus", } path = WorkflowTemplateServiceClient.node_group_path(**expected) @@ -8166,9 +8264,9 @@ def test_parse_node_group_path(): def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -8180,9 +8278,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "abalone", - "location": "squid", - "service": "clam", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = WorkflowTemplateServiceClient.service_path(**expected) @@ -8192,9 +8290,9 @@ def test_parse_service_path(): def test_workflow_template_path(): - project = "whelk" - region = "octopus" - workflow_template = "oyster" + project = "scallop" + region = "abalone" + workflow_template = "squid" expected = "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( project=project, region=region, @@ -8208,9 +8306,9 @@ def test_workflow_template_path(): def test_parse_workflow_template_path(): expected = { - "project": "nudibranch", - "region": "cuttlefish", - "workflow_template": "mussel", + "project": "clam", + "region": "whelk", + "workflow_template": "octopus", } path = WorkflowTemplateServiceClient.workflow_template_path(**expected) @@ -8220,7 +8318,7 @@ def test_parse_workflow_template_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8230,7 +8328,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = WorkflowTemplateServiceClient.common_billing_account_path(**expected) @@ -8240,7 +8338,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8250,7 +8348,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = WorkflowTemplateServiceClient.common_folder_path(**expected) @@ -8260,7 +8358,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8270,7 +8368,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = WorkflowTemplateServiceClient.common_organization_path(**expected) @@ -8280,7 +8378,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): 
- project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8290,7 +8388,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = WorkflowTemplateServiceClient.common_project_path(**expected) @@ -8300,8 +8398,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -8312,8 +8410,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = WorkflowTemplateServiceClient.common_location_path(**expected) From 76267b2b8998fd2a3602ebf4d12d2aaa30a90cde Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 00:57:09 +0000 Subject: [PATCH 064/108] feat: [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` (#13074) - [ ] Regenerate this pull request now. feat: A new value `CANCELLED` is added to enum `State` PiperOrigin-RevId: 673051518 Source-Link: https://github.com/googleapis/googleapis/commit/2b46b7546bd801cf9bc9449843666c4b55fc574d Source-Link: https://github.com/googleapis/googleapis-gen/commit/4083dff5bd4f0c0136aca9f226ae89f58a669069 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiI0MDgzZGZmNWJkNGYwYzAxMzZhY2E5ZjIyNmFlODlmNThhNjY5MDY5In0= --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/gapic_version.py | 2 +- .../google/cloud/batch_v1alpha/types/job.py | 10 ++++++++++ .../snippet_metadata_google.cloud.batch.v1.json | 2 +- .../snippet_metadata_google.cloud.batch.v1alpha.json | 2 +- 6 files changed, 15 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 7de8a6a6838d..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 7de8a6a6838d..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 7de8a6a6838d..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 666e309ad00b..744d6bdb9a8b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -332,6 +332,14 @@ class State(proto.Enum): The Job will be deleted, but has not been deleted yet. Typically this is because resources used by the Job are still being cleaned up. + CANCELLATION_IN_PROGRESS (7): + The Job cancellation is in progress, this is + because the resources used by the Job are still + being cleaned up. + CANCELLED (8): + The Job has been cancelled, the task + executions were stopped and the resources were + cleaned up. """ STATE_UNSPECIFIED = 0 QUEUED = 1 @@ -340,6 +348,8 @@ class State(proto.Enum): SUCCEEDED = 4 FAILED = 5 DELETION_IN_PROGRESS = 6 + CANCELLATION_IN_PROGRESS = 7 + CANCELLED = 8 class InstanceStatus(proto.Message): r"""VM instance status. diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 1a9ad7a0b658..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.27" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 9c3638c4d767..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.27" + "version": "0.1.0" }, "snippets": [ { From b624f04da8a9b6461d4714f0f0bcf13f1f35fa31 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:00:39 +0000 Subject: [PATCH 065/108] feat: [google-cloud-orchestration-airflow] add `satisfies_pzi` to `Environment` (#13077) - [ ] Regenerate this pull request now. 
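(Editorial aside; not part of the generated patch.) The batch_v1alpha hunk above adds `CANCELLATION_IN_PROGRESS` and `CANCELLED` to `JobStatus.State`. A minimal sketch of how a caller might branch on the new values follows; the job resource name is a placeholder, and the calls assume the published `google-cloud-batch` v1alpha surface plus the two enum values introduced here.

.. code-block:: python

    from google.cloud import batch_v1alpha


    def report_cancellation_state(job_name: str) -> None:
        # job_name is a placeholder, e.g.
        # "projects/my-project/locations/us-central1/jobs/my-job".
        client = batch_v1alpha.BatchServiceClient()
        job = client.get_job(name=job_name)

        state = job.status.state
        State = batch_v1alpha.JobStatus.State
        if state == State.CANCELLATION_IN_PROGRESS:
            # Cancellation was requested; resources are still being cleaned up.
            print("Job cancellation in progress.")
        elif state == State.CANCELLED:
            # Task executions were stopped and resources were cleaned up.
            print("Job was cancelled.")
        else:
            print(f"Job state: {state.name}")
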
BEGIN_COMMIT_OVERRIDE feat: [google-cloud-orchestration-airflow] add `satisfies_pzi` to `Environment` feat: [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` feat: A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` feat: A new message `CheckUpgradeRequest` is added feat: A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` feat: A new message `AirflowMetadataRetentionPolicyConfig` is added docs: A comment for field `maintenance_window` in message `.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed docs: A comment for message `WorkloadsConfig` is changed docs: A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed END_COMMIT_OVERRIDE PiperOrigin-RevId: 673910740 Source-Link: https://github.com/googleapis/googleapis/commit/dcc4f933bfbc0bc805187ae7f65b3b6be23fd1c3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0683c793d18547a017f446533bed4bcd09f565d9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yY2hlc3RyYXRpb24tYWlyZmxvdy8uT3dsQm90LnlhbWwiLCJoIjoiMDY4M2M3OTNkMTg1NDdhMDE3ZjQ0NjUzM2JlZDRiY2QwOWY1NjVkOSJ9 BEGIN_NESTED_COMMIT feat: [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` feat: A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` feat: A new message `CheckUpgradeRequest` is added feat: A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` feat: A new message `AirflowMetadataRetentionPolicyConfig` is added docs: A comment for field `maintenance_window` in message `.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed docs: A comment for message `WorkloadsConfig` is changed docs: A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed PiperOrigin-RevId: 673766368 Source-Link: https://github.com/googleapis/googleapis/commit/0f44538daf93e648e4fe5529acf8219cef3a0a39 Source-Link: https://github.com/googleapis/googleapis-gen/commit/802f7c8cdf887527e99fa9c0d774adfd33a16ffe Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yY2hlc3RyYXRpb24tYWlyZmxvdy8uT3dsQm90LnlhbWwiLCJoIjoiODAyZjdjOGNkZjg4NzUyN2U5OWZhOWMwZDc3NGFkZmQzM2ExNmZmZSJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../orchestration/airflow/service/__init__.py | 4 + .../airflow/service_v1/__init__.py | 4 + .../airflow/service_v1/gapic_metadata.json | 15 + .../services/environments/async_client.py | 103 ++++ .../services/environments/client.py | 101 ++++ .../services/environments/transports/base.py | 14 + .../services/environments/transports/grpc.py | 29 + .../environments/transports/grpc_asyncio.py | 36 ++ .../services/environments/transports/rest.py | 133 +++++ .../airflow/service_v1/types/__init__.py | 4 + .../airflow/service_v1/types/environments.py | 121 +++- .../service_v1beta1/types/environments.py | 6 + ...erated_environments_check_upgrade_async.py | 56 ++ ...nerated_environments_check_upgrade_sync.py | 56 ++ ...loud.orchestration.airflow.service.v1.json | 153 +++++ .../scripts/fixup_service_v1_keywords.py | 1 + .../gapic/service_v1/test_environments.py | 561 +++++++++++++++++- 
.../service_v1beta1/test_environments.py | 9 + 18 files changed, 1400 insertions(+), 6 deletions(-) create mode 100644 packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py create mode 100644 packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py index b79f7274cddc..962fbf440f12 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py @@ -31,6 +31,8 @@ ImageVersionsClient, ) from google.cloud.orchestration.airflow.service_v1.types.environments import ( + AirflowMetadataRetentionPolicyConfig, + CheckUpgradeRequest, CheckUpgradeResponse, CloudDataLineageIntegration, CreateEnvironmentRequest, @@ -104,6 +106,8 @@ "EnvironmentsAsyncClient", "ImageVersionsClient", "ImageVersionsAsyncClient", + "AirflowMetadataRetentionPolicyConfig", + "CheckUpgradeRequest", "CheckUpgradeResponse", "CloudDataLineageIntegration", "CreateEnvironmentRequest", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py index a30d9c61ae59..2cab45afb1ee 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py @@ -23,6 +23,8 @@ from .services.environments import EnvironmentsAsyncClient, EnvironmentsClient from .services.image_versions import ImageVersionsAsyncClient, ImageVersionsClient from .types.environments import ( + AirflowMetadataRetentionPolicyConfig, + CheckUpgradeRequest, CheckUpgradeResponse, CloudDataLineageIntegration, CreateEnvironmentRequest, @@ -92,6 +94,8 @@ __all__ = ( "EnvironmentsAsyncClient", "ImageVersionsAsyncClient", + "AirflowMetadataRetentionPolicyConfig", + "CheckUpgradeRequest", "CheckUpgradeResponse", "CloudDataLineageIntegration", "CreateEnvironmentRequest", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json index 3511b1f8d28f..aca0f03a3f15 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "EnvironmentsClient", "rpcs": { + "CheckUpgrade": { + "methods": [ + "check_upgrade" + ] + }, "CreateEnvironment": { "methods": [ "create_environment" @@ -130,6 +135,11 @@ "grpc-async": { "libraryClient": "EnvironmentsAsyncClient", "rpcs": { + "CheckUpgrade": { + "methods": [ + "check_upgrade" + ] + }, "CreateEnvironment": { "methods": [ "create_environment" @@ -250,6 +260,11 @@ "rest": { "libraryClient": "EnvironmentsClient", "rpcs": { + "CheckUpgrade": { + "methods": [ + "check_upgrade" + ] + }, "CreateEnvironment": { "methods": [ "create_environment" diff --git 
a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index 3406e2214500..849eed1fbf2d 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -1448,6 +1448,109 @@ async def sample_list_workloads(): # Done; return the response. return response + async def check_upgrade( + self, + request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest, dict]]): + The request object. Request to check whether image + upgrade will succeed. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeResponse` Message containing information about the result of an upgrade check + operation. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, environments.CheckUpgradeRequest): + request = environments.CheckUpgradeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.check_upgrade + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.CheckUpgradeResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + async def create_user_workloads_secret( self, request: Optional[ diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index a00e4bf1e986..6a8635aa5380 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -1884,6 +1884,107 @@ def sample_list_workloads(): # Done; return the response. return response + def check_upgrade( + self, + request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest, dict]): + The request object. Request to check whether image + upgrade will succeed. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeResponse` Message containing information about the result of an upgrade check + operation. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, environments.CheckUpgradeRequest): + request = environments.CheckUpgradeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check_upgrade] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.CheckUpgradeResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + def create_user_workloads_secret( self, request: Optional[ diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py index 735acee260d9..05b1c3627955 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py @@ -177,6 +177,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.check_upgrade: gapic_v1.method.wrap_method( + self.check_upgrade, + default_timeout=None, + client_info=client_info, + ), self.create_user_workloads_secret: gapic_v1.method.wrap_method( self.create_user_workloads_secret, default_timeout=None, @@ -359,6 +364,15 @@ def list_workloads( ]: raise NotImplementedError() + @property + def check_upgrade( + self, + ) -> Callable[ + [environments.CheckUpgradeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_user_workloads_secret( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py index 78c4216a1d96..1d424d9d8162 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py @@ -502,6 +502,35 @@ def list_workloads( ) return self._stubs["list_workloads"] + @property + def check_upgrade( + self, + ) -> Callable[[environments.CheckUpgradeRequest], operations_pb2.Operation]: + r"""Return a callable for the check upgrade method over gRPC. + + Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. 
+ + Returns: + Callable[[~.CheckUpgradeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_upgrade" not in self._stubs: + self._stubs["check_upgrade"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/CheckUpgrade", + request_serializer=environments.CheckUpgradeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["check_upgrade"] + @property def create_user_workloads_secret( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py index cbe9ca640a9c..0c408a50e4cf 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py @@ -518,6 +518,37 @@ def list_workloads( ) return self._stubs["list_workloads"] + @property + def check_upgrade( + self, + ) -> Callable[ + [environments.CheckUpgradeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the check upgrade method over gRPC. + + Check if an upgrade operation on the environment will + succeed. + In case of problems detailed info can be found in the + returned Operation. + + Returns: + Callable[[~.CheckUpgradeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "check_upgrade" not in self._stubs: + self._stubs["check_upgrade"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/CheckUpgrade", + request_serializer=environments.CheckUpgradeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["check_upgrade"] + @property def create_user_workloads_secret( self, @@ -1021,6 +1052,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.check_upgrade: gapic_v1.method_async.wrap_method( + self.check_upgrade, + default_timeout=None, + client_info=client_info, + ), self.create_user_workloads_secret: gapic_v1.method_async.wrap_method( self.create_user_workloads_secret, default_timeout=None, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py index a735deff7f54..6a8be243f2a9 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py @@ -72,6 +72,14 @@ class EnvironmentsRestInterceptor: .. code-block:: python class MyCustomEnvironmentsInterceptor(EnvironmentsRestInterceptor): + def pre_check_upgrade(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_upgrade(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -254,6 +262,29 @@ def post_update_user_workloads_secret(self, response): """ + def pre_check_upgrade( + self, + request: environments.CheckUpgradeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.CheckUpgradeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check_upgrade + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_check_upgrade( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for check_upgrade + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_create_environment( self, request: environments.CreateEnvironmentRequest, @@ -988,6 +1019,100 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CheckUpgrade(EnvironmentsRestStub): + def __hash__(self): + return hash("CheckUpgrade") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environments.CheckUpgradeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the check upgrade method over HTTP. 
+ + Args: + request (~.environments.CheckUpgradeRequest): + The request object. Request to check whether image + upgrade will succeed. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{environment=projects/*/locations/*/environments/*}:checkUpgrade", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_check_upgrade(request, metadata) + pb_request = environments.CheckUpgradeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_upgrade(resp) + return resp + class _CreateEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("CreateEnvironment") @@ -2958,6 +3083,14 @@ def __call__( resp = self._interceptor.post_update_user_workloads_secret(resp) return resp + @property + def check_upgrade( + self, + ) -> Callable[[environments.CheckUpgradeRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckUpgrade(self._session, self._host, self._interceptor) # type: ignore + @property def create_environment( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py index 873b24e7b536..05d6386b5e43 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from .environments import ( + AirflowMetadataRetentionPolicyConfig, + CheckUpgradeRequest, CheckUpgradeResponse, CloudDataLineageIntegration, CreateEnvironmentRequest, @@ -81,6 +83,8 @@ from .operations import OperationMetadata __all__ = ( + "AirflowMetadataRetentionPolicyConfig", + "CheckUpgradeRequest", "CheckUpgradeResponse", "CloudDataLineageIntegration", "CreateEnvironmentRequest", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py index 5414a93ee0fe..05f5049857da 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py @@ -79,9 +79,11 @@ "MasterAuthorizedNetworksConfig", "CloudDataLineageIntegration", "Environment", + "CheckUpgradeRequest", "CheckUpgradeResponse", "DataRetentionConfig", "TaskLogsRetentionConfig", + "AirflowMetadataRetentionPolicyConfig", }, ) @@ -1392,8 +1394,10 @@ class EnvironmentConfig(proto.Message): hours. If this value is omitted, the default value for - maintenance window will be applied. The default - value is Saturday and Sunday 00-06 GMT. + maintenance window is applied. By default, + maintenance windows are from 00:00:00 to + 04:00:00 (GMT) on Friday, Saturday, and Sunday + every week. workloads_config (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig): Optional. The workloads configuration settings for the GKE cluster associated with the Cloud Composer environment. The @@ -2559,6 +2563,9 @@ class TriggererResource(proto.Message): class DagProcessorResource(proto.Message): r"""Configuration for resources used by Airflow DAG processors. + This field is supported for Cloud Composer environments in versions + composer-3.\ *.*-airflow-*.*.\* and newer. + Attributes: cpu (float): Optional. CPU request and limit for a single @@ -2778,6 +2785,8 @@ class Environment(proto.Message): <= 128 bytes in size. satisfies_pzs (bool): Output only. Reserved for future use. + satisfies_pzi (bool): + Output only. Reserved for future use. storage_config (google.cloud.orchestration.airflow.service_v1.types.StorageConfig): Optional. Storage configuration for this environment. @@ -2850,6 +2859,10 @@ class State(proto.Enum): proto.BOOL, number=8, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=10, + ) storage_config: "StorageConfig" = proto.Field( proto.MESSAGE, number=9, @@ -2857,6 +2870,58 @@ class State(proto.Enum): ) +class CheckUpgradeRequest(proto.Message): + r"""Request to check whether image upgrade will succeed. + + Attributes: + environment (str): + Required. The resource name of the + environment to check upgrade for, in the form: + + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + image_version (str): + Optional. The version of the software running in the + environment. This encapsulates both the version of Cloud + Composer functionality and the version of Apache Airflow. It + must match the regular expression + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. + When used as input, the server also checks if the provided + version is supported and denies the request for an + unsupported version. 
+ + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. + + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + image_version: str = proto.Field( + proto.STRING, + number=2, + ) + + class CheckUpgradeResponse(proto.Message): r"""Message containing information about the result of an upgrade check operation. @@ -2927,11 +2992,21 @@ class DataRetentionConfig(proto.Message): mechanism. Attributes: + airflow_metadata_retention_config (google.cloud.orchestration.airflow.service_v1.types.AirflowMetadataRetentionPolicyConfig): + Optional. The retention policy for airflow + metadata database. task_logs_retention_config (google.cloud.orchestration.airflow.service_v1.types.TaskLogsRetentionConfig): Optional. The configuration settings for task logs retention """ + airflow_metadata_retention_config: "AirflowMetadataRetentionPolicyConfig" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="AirflowMetadataRetentionPolicyConfig", + ) + ) task_logs_retention_config: "TaskLogsRetentionConfig" = proto.Field( proto.MESSAGE, number=2, @@ -2945,8 +3020,7 @@ class TaskLogsRetentionConfig(proto.Message): Attributes: storage_mode (google.cloud.orchestration.airflow.service_v1.types.TaskLogsRetentionConfig.TaskLogsStorageMode): Optional. The mode of storage for Airflow - workers task logs. For details, see - go/composer-store-task-logs-in-cloud-logging-only-design-doc + workers task logs. """ class TaskLogsStorageMode(proto.Enum): @@ -2973,4 +3047,43 @@ class TaskLogsStorageMode(proto.Enum): ) +class AirflowMetadataRetentionPolicyConfig(proto.Message): + r"""The policy for airflow metadata database retention. + + Attributes: + retention_mode (google.cloud.orchestration.airflow.service_v1.types.AirflowMetadataRetentionPolicyConfig.RetentionMode): + Optional. Retention can be either enabled or + disabled. + retention_days (int): + Optional. How many days data should be + retained for. + """ + + class RetentionMode(proto.Enum): + r"""Describes retention policy. + + Values: + RETENTION_MODE_UNSPECIFIED (0): + Default mode doesn't change environment + parameters. + RETENTION_MODE_ENABLED (1): + Retention policy is enabled. + RETENTION_MODE_DISABLED (2): + Retention policy is disabled. 
+ """ + RETENTION_MODE_UNSPECIFIED = 0 + RETENTION_MODE_ENABLED = 1 + RETENTION_MODE_DISABLED = 2 + + retention_mode: RetentionMode = proto.Field( + proto.ENUM, + number=1, + enum=RetentionMode, + ) + retention_days: int = proto.Field( + proto.INT32, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py index b83e81eb209b..a7d029a3a945 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py @@ -2957,6 +2957,8 @@ class Environment(proto.Message): <= 128 bytes in size. satisfies_pzs (bool): Output only. Reserved for future use. + satisfies_pzi (bool): + Output only. Reserved for future use. storage_config (google.cloud.orchestration.airflow.service_v1beta1.types.StorageConfig): Optional. Storage configuration for this environment. @@ -3029,6 +3031,10 @@ class State(proto.Enum): proto.BOOL, number=8, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=10, + ) storage_config: "StorageConfig" = proto.Field( proto.MESSAGE, number=9, diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py new file mode 100644 index 000000000000..6b0d1d7e863b --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckUpgrade +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_CheckUpgrade_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_CheckUpgrade_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py new file mode 100644 index 000000000000..f8e30156ed47 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_check_upgrade_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckUpgrade +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_CheckUpgrade_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_check_upgrade(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.CheckUpgradeRequest( + environment="environment_value", + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_CheckUpgrade_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index 3fffc7af33c9..389370672713 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -11,6 +11,159 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.check_upgrade", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.CheckUpgrade", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "CheckUpgrade" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "check_upgrade" + }, + "description": "Sample for CheckUpgrade", + "file": "composer_v1_generated_environments_check_upgrade_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_CheckUpgrade_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_check_upgrade_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.check_upgrade", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.CheckUpgrade", + "service": { + "fullName": 
"google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "CheckUpgrade" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.CheckUpgradeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "check_upgrade" + }, + "description": "Sample for CheckUpgrade", + "file": "composer_v1_generated_environments_check_upgrade_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_CheckUpgrade_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_check_upgrade_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py index dcf2905a8c0c..1d453d96293f 100644 --- a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py +++ b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py @@ -39,6 +39,7 @@ def partition( class serviceCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'check_upgrade': ('environment', 'image_version', ), 'create_environment': ('parent', 'environment', ), 'create_user_workloads_config_map': ('parent', 'user_workloads_config_map', ), 'create_user_workloads_secret': ('parent', 'user_workloads_secret', ), diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py index 3751f10f0bbf..ddd117361ace 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py @@ -1531,6 +1531,7 @@ def test_get_environment(request_type, transport: str = "grpc"): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_environment(request) @@ -1546,6 +1547,7 @@ def test_get_environment(request_type, transport: str = "grpc"): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_empty_call(): @@ -1648,6 +1650,7 @@ async def test_get_environment_empty_call_async(): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_environment() @@ -1720,6 +1723,7 @@ async def test_get_environment_async( uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await 
client.get_environment(request) @@ -1736,6 +1740,7 @@ async def test_get_environment_async( assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -4783,6 +4788,293 @@ async def test_list_workloads_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + environments.CheckUpgradeRequest, + dict, + ], +) +def test_check_upgrade(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = environments.CheckUpgradeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_check_upgrade_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.check_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.CheckUpgradeRequest() + + +def test_check_upgrade_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = environments.CheckUpgradeRequest( + environment="environment_value", + image_version="image_version_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.check_upgrade(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.CheckUpgradeRequest( + environment="environment_value", + image_version="image_version_value", + ) + + +def test_check_upgrade_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_upgrade in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.check_upgrade] = mock_rpc + request = {} + client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.check_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_check_upgrade_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.check_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.CheckUpgradeRequest() + + +@pytest.mark.asyncio +async def test_check_upgrade_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.check_upgrade + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.check_upgrade + ] = mock_rpc + + request = {} + await client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.check_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_check_upgrade_async( + transport: str = "grpc_asyncio", request_type=environments.CheckUpgradeRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = environments.CheckUpgradeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_check_upgrade_async_from_dict(): + await test_check_upgrade_async(request_type=dict) + + +def test_check_upgrade_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.CheckUpgradeRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_upgrade_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.CheckUpgradeRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_upgrade), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -10400,7 +10692,11 @@ def test_create_environment_rest(request_type): }, "resilience_mode": 1, "data_retention_config": { - "task_logs_retention_config": {"storage_mode": 1} + "airflow_metadata_retention_config": { + "retention_mode": 1, + "retention_days": 1512, + }, + "task_logs_retention_config": {"storage_mode": 1}, }, }, "uuid": "uuid_value", @@ -10409,6 +10705,7 @@ def test_create_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -10713,6 +11010,7 @@ def test_get_environment_rest(request_type): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -10732,6 +11030,7 @@ def test_get_environment_rest(request_type): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_rest_use_cached_wrapped_rpc(): @@ -11324,7 +11623,11 @@ def test_update_environment_rest(request_type): }, "resilience_mode": 1, "data_retention_config": { - "task_logs_retention_config": {"storage_mode": 1} + "airflow_metadata_retention_config": { + "retention_mode": 1, + "retention_days": 1512, + }, + "task_logs_retention_config": {"storage_mode": 1}, }, }, "uuid": "uuid_value", @@ -11333,6 +11636,7 @@ def test_update_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -12747,6 +13051,255 @@ def test_list_workloads_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + environments.CheckUpgradeRequest, + dict, + ], +) +def test_check_upgrade_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.check_upgrade(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_check_upgrade_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_upgrade in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.check_upgrade] = mock_rpc + + request = {} + client.check_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.check_upgrade(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_check_upgrade_rest_required_fields( + request_type=environments.CheckUpgradeRequest, +): + transport_class = transports.EnvironmentsRestTransport + + request_init = {} + request_init["environment"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_upgrade._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["environment"] = "environment_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_upgrade._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "environment" in jsonified_request + assert jsonified_request["environment"] == "environment_value" + + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.check_upgrade(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_check_upgrade_rest_unset_required_fields(): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.check_upgrade._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("environment",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_upgrade_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_check_upgrade" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_check_upgrade" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.CheckUpgradeRequest.pb( + environments.CheckUpgradeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = environments.CheckUpgradeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.check_upgrade( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_check_upgrade_rest_bad_request( + transport: str = "rest", request_type=environments.CheckUpgradeRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_upgrade(request) + + +def test_check_upgrade_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -17095,6 +17648,7 @@ def test_environments_base_transport(): "stop_airflow_command", "poll_airflow_command", "list_workloads", + "check_upgrade", "create_user_workloads_secret", "get_user_workloads_secret", "list_user_workloads_secrets", @@ -17416,6 +17970,9 @@ def test_environments_client_transport_session_collision(transport_name): session1 = client1.transport.list_workloads._session session2 = client2.transport.list_workloads._session assert session1 != session2 + session1 = client1.transport.check_upgrade._session + session2 = client2.transport.check_upgrade._session + assert session1 != session2 session1 = client1.transport.create_user_workloads_secret._session session2 = client2.transport.create_user_workloads_secret._session assert session1 != session2 diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py index 5cef23532261..053e5f28d823 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py @@ -1534,6 +1534,7 @@ def test_get_environment(request_type, transport: str = "grpc"): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_environment(request) @@ -1549,6 +1550,7 @@ def test_get_environment(request_type, transport: str = "grpc"): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_empty_call(): @@ -1651,6 +1653,7 @@ async def test_get_environment_empty_call_async(): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_environment() @@ -1723,6 +1726,7 @@ async def test_get_environment_async( uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_environment(request) @@ -1739,6 +1743,7 @@ async def test_get_environment_async( assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -11008,6 +11013,7 @@ def test_create_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
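# Illustrative sketch (not from the generated patch): the Composer v1 hunks above
# add a CheckUpgrade RPC plus an AirflowMetadataRetentionPolicyConfig message.
# A minimal combined use might look like the following, assuming an existing
# environment and that service_v1 re-exports these types at package level (as the
# generated samples do for CheckUpgradeRequest); the image_version alias and the
# project/location/environment values are placeholders.
from google.cloud.orchestration.airflow import service_v1


def check_upgrade_and_read_retention(project: str, location: str, env_id: str) -> None:
    client = service_v1.EnvironmentsClient()
    name = f"projects/{project}/locations/{location}/environments/{env_id}"

    # CheckUpgrade is a long-running operation; result() blocks until the
    # upgrade check finishes and returns the CheckUpgradeResponse.
    operation = client.check_upgrade(
        request=service_v1.CheckUpgradeRequest(
            environment=name,
            image_version="composer-3-airflow-2",  # alias form; the server resolves it
        )
    )
    print(operation.result())

    # The new retention policy is surfaced under the environment's
    # data_retention_config field.
    env = client.get_environment(request={"name": name})
    retention = env.config.data_retention_config.airflow_metadata_retention_config
    print(retention.retention_mode, retention.retention_days)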
@@ -11312,6 +11318,7 @@ def test_get_environment_rest(request_type): uuid="uuid_value", state=environments.Environment.State.CREATING, satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -11331,6 +11338,7 @@ def test_get_environment_rest(request_type): assert response.uuid == "uuid_value" assert response.state == environments.Environment.State.CREATING assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_environment_rest_use_cached_wrapped_rpc(): @@ -11938,6 +11946,7 @@ def test_update_environment_rest(request_type): "update_time": {}, "labels": {}, "satisfies_pzs": True, + "satisfies_pzi": True, "storage_config": {"bucket": "bucket_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. From a20b1e508068845c36b1701836ba17a699cb10ac Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:03:49 +0000 Subject: [PATCH 066/108] feat: [google-apps-chat] If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces an... (#13081) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: - SearchSpaces - DeleteSpace - UpdateSpace - GetSpace - ListMemberships - DeleteMembership - CreateMembership - UpdateMembership - GetMembership Additionally, `last_active_time` and `membership_count` parameters are added to the `Space` resource. docs: A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator END_COMMIT_OVERRIDE d memberships in your Workspace organization: - SearchSpaces - DeleteSpace - UpdateSpace - GetSpace - ListMemberships - DeleteMembership - CreateMembership - UpdateMembership - GetMembership Additionally, `last_active_time` and `membership_count` parameters are added to the `Space` resource. 
docs: A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator PiperOrigin-RevId: 673895888 Source-Link: https://github.com/googleapis/googleapis/commit/c5bc296a6d5e0b9344e2a2aef90cefb017ae29ad Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ed1ebf58a6d432da9a23de6d9d9d058c21d9a44 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2hhdC8uT3dsQm90LnlhbWwiLCJoIjoiMWVkMWViZjU4YTZkNDMyZGE5YTIzZGU2ZDlkOWQwNThjMjFkOWE0NCJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/apps/chat/__init__.py | 4 + .../google/apps/chat/gapic_version.py | 2 +- .../google/apps/chat_v1/__init__.py | 4 + .../google/apps/chat_v1/gapic_metadata.json | 15 + .../google/apps/chat_v1/gapic_version.py | 2 +- .../services/chat_service/async_client.py | 97 ++ .../chat_v1/services/chat_service/client.py | 95 ++ .../chat_v1/services/chat_service/pagers.py | 152 +++ .../services/chat_service/transports/base.py | 23 + .../services/chat_service/transports/grpc.py | 30 + .../chat_service/transports/grpc_asyncio.py | 44 + .../services/chat_service/transports/rest.py | 128 +++ .../google/apps/chat_v1/types/__init__.py | 4 + .../google/apps/chat_v1/types/membership.py | 89 +- .../google/apps/chat_v1/types/space.py | 287 ++++++ ...erated_chat_service_search_spaces_async.py | 53 + ...nerated_chat_service_search_spaces_sync.py | 53 + .../snippet_metadata_google.chat.v1.json | 155 ++- .../scripts/fixup_chat_v1_keywords.py | 17 +- .../unit/gapic/chat_v1/test_chat_service.py | 924 ++++++++++++++++-- 20 files changed, 2108 insertions(+), 70 deletions(-) create mode 100644 packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py create mode 100644 packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py diff --git a/packages/google-apps-chat/google/apps/chat/__init__.py b/packages/google-apps-chat/google/apps/chat/__init__.py index ad2d607382db..17679ffaa12a 100644 --- a/packages/google-apps-chat/google/apps/chat/__init__.py +++ b/packages/google-apps-chat/google/apps/chat/__init__.py @@ -111,6 +111,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -212,6 +214,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/__init__.py index 8fe816081153..d770a6fbb7ff 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/__init__.py @@ -108,6 +108,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -201,6 +203,8 @@ "ReactionCreatedEventData", "ReactionDeletedEventData", "RichLinkMetadata", + "SearchSpacesRequest", + "SearchSpacesResponse", "SetUpSpaceRequest", "SlashCommand", "SlashCommandMetadata", diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json index 3cdf90b4e07e..325f347acf03 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json @@ -120,6 +120,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -265,6 +270,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -410,6 +420,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 945434859bde..82774eb03431 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -1535,6 +1535,103 @@ async def sample_list_spaces(): # Done; return the response. return response + async def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesAsyncPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]]): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager: + Response with a list of spaces + corresponding to the search spaces + request. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_spaces + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchSpacesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 268ff696d3a6..0d542091414a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -2071,6 +2071,101 @@ def sample_list_spaces(): # Done; return the response. return response + def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager: + Response with a list of spaces + corresponding to the search spaces + request. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_spaces] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchSpacesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py index d7565e952ff8..94763fc39240 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py @@ -497,6 +497,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class SearchSpacesPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., space.SearchSpacesResponse], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[space.Space]: + for page in self.pages: + yield from page.spaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchSpacesAsyncPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[space.SearchSpacesResponse]], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[space.Space]: + async def async_generator(): + async for page in self.pages: + for response in page.spaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListReactionsPager: """A pager for iterating through ``list_reactions`` requests. diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py index e5b32aaf272b..5c83cc5f462e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py @@ -304,6 +304,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method.wrap_method( + self.search_spaces, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method.wrap_method( self.get_space, default_retry=retries.Retry( @@ -662,6 +676,15 @@ def list_spaces( ]: raise NotImplementedError() + @property + def search_spaces( + self, + ) -> Callable[ + [space.SearchSpacesRequest], + Union[space.SearchSpacesResponse, Awaitable[space.SearchSpacesResponse]], + ]: + raise NotImplementedError() + @property def get_space( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index 2ef3b8c317bb..bfb0492b21e8 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -619,6 +619,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + r"""Return a callable for the search spaces method over gRPC. + + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + Returns: + Callable[[~.SearchSpacesRequest], + ~.SearchSpacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: r"""Return a callable for the get space method over gRPC. diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index e36b81f08a6c..a404fca34305 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -629,6 +629,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], Awaitable[space.SearchSpacesResponse]]: + r"""Return a callable for the search spaces method over gRPC. + + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + Returns: + Callable[[~.SearchSpacesRequest], + Awaitable[~.SearchSpacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], Awaitable[space.Space]]: r"""Return a callable for the get space method over gRPC. 
@@ -1485,6 +1515,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method_async.wrap_method( + self.search_spaces, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method_async.wrap_method( self.get_space, default_retry=retries.AsyncRetry( diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index 87947ff116c8..f9c4a5cd53a3 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -241,6 +241,14 @@ def post_list_spaces(self, response): logging.log(f"Received response: {response}") return response + def pre_search_spaces(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_spaces(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_up_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -744,6 +752,27 @@ def post_list_spaces( """ return response + def pre_search_spaces( + self, request: space.SearchSpacesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[space.SearchSpacesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_spaces + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + + def post_search_spaces( + self, response: space.SearchSpacesResponse + ) -> space.SearchSpacesResponse: + """Post-rpc interceptor for search_spaces + + Override in a subclass to manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. + """ + return response + def pre_set_up_space( self, request: space_setup.SetUpSpaceRequest, @@ -2908,6 +2937,97 @@ def __call__( resp = self._interceptor.post_list_spaces(resp) return resp + class _SearchSpaces(ChatServiceRestStub): + def __hash__(self): + return hash("SearchSpaces") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "query": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: space.SearchSpacesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> space.SearchSpacesResponse: + r"""Call the search spaces method over HTTP. + + Args: + request (~.space.SearchSpacesRequest): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.space.SearchSpacesResponse: + Response with a list of spaces + corresponding to the search spaces + request. 
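The rest.py hunk above adds pre_search_spaces and post_search_spaces hooks to ChatServiceRestInterceptor. The sketch below shows one way a custom interceptor could be wired in; it is an assumption-laden illustration (the logging behaviour and the anonymous credentials are not part of the diff), mirroring the transport construction used by the REST interceptor tests later in this patch.

import logging
from typing import Sequence, Tuple

from google.apps import chat_v1
from google.apps.chat_v1.services.chat_service.transports import rest as chat_rest
from google.apps.chat_v1.types import space
from google.auth import credentials as ga_credentials


class LoggingChatInterceptor(chat_rest.ChatServiceRestInterceptor):
    """Logs every SearchSpaces request and response passing through REST."""

    def pre_search_spaces(
        self,
        request: space.SearchSpacesRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[space.SearchSpacesRequest, Sequence[Tuple[str, str]]]:
        logging.info("SearchSpaces query: %s", request.query)
        return request, metadata

    def post_search_spaces(
        self, response: space.SearchSpacesResponse
    ) -> space.SearchSpacesResponse:
        logging.info("SearchSpaces returned %d spaces", len(response.spaces))
        return response


# Anonymous credentials keep the sketch self-contained; real calls need
# credentials with the chat.admin.spaces or chat.admin.spaces.readonly scope.
transport = chat_rest.ChatServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingChatInterceptor(),
)
client = chat_v1.ChatServiceClient(transport=transport)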
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/spaces:search", + }, + ] + request, metadata = self._interceptor.pre_search_spaces(request, metadata) + pb_request = space.SearchSpacesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = space.SearchSpacesResponse() + pb_resp = space.SearchSpacesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_spaces(resp) + return resp + class _SetUpSpace(ChatServiceRestStub): def __hash__(self): return hash("SetUpSpace") @@ -3671,6 +3791,14 @@ def list_spaces( # In C++ this would require a dynamic_cast return self._ListSpaces(self._session, self._host, self._interceptor) # type: ignore + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchSpaces(self._session, self._host, self._interceptor) # type: ignore + @property def set_up_space(self) -> Callable[[space_setup.SetUpSpaceRequest], space.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py index 3e4671f0637d..d510a888bd89 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py @@ -102,6 +102,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -198,6 +200,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py index 925e1d7b28ed..f0e00c470930 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py @@ -208,6 +208,20 @@ class CreateMembershipRequest(proto.Message): relation for itself, it must use the ``chat.memberships.app`` scope, set ``user.type`` to ``BOT``, and set ``user.name`` to ``users/app``. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. 
+ + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Creating app memberships or creating memberships for users + outside the administrator's Google Workspace organization + isn't supported using admin access. """ parent: str = proto.Field( @@ -219,6 +233,10 @@ class CreateMembershipRequest(proto.Message): number=2, message="Membership", ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=5, + ) class UpdateMembershipRequest(proto.Message): @@ -235,6 +253,16 @@ class UpdateMembershipRequest(proto.Message): Currently supported field paths: - ``role`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. """ membership: "Membership" = proto.Field( @@ -247,6 +275,10 @@ class UpdateMembershipRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class ListMembershipsRequest(proto.Message): @@ -289,8 +321,8 @@ class ListMembershipsRequest(proto.Message): ``ROLE_MANAGER``. To filter by type, set ``member.type`` to ``HUMAN`` or - ``BOT``. Developer Preview: You can also filter for - ``member.type`` using the ``!=`` operator. + ``BOT``. You can also filter for ``member.type`` using the + ``!=`` operator. To filter by both role and type, use the ``AND`` operator. To filter by either role or type, use the ``OR`` operator. @@ -338,6 +370,20 @@ class ListMembershipsRequest(proto.Message): Currently requires `user authentication `__. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.memberships.readonly`` or + ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Listing app memberships in a space isn't supported when + using admin access. """ parent: str = proto.Field( @@ -364,6 +410,10 @@ class ListMembershipsRequest(proto.Message): proto.BOOL, number=7, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=8, + ) class ListMembershipsResponse(proto.Message): @@ -414,12 +464,30 @@ class GetMembershipRequest(proto.Message): For example, ``spaces/{space}/members/example@gmail.com`` where ``example@gmail.com`` is the email of the Google Chat user. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` or + ``chat.admin.memberships.readonly`` `OAuth 2.0 + scopes `__. + + Getting app memberships in a space isn't supported when + using admin access. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class DeleteMembershipRequest(proto.Message): @@ -444,12 +512,29 @@ class DeleteMembershipRequest(proto.Message): Format: ``spaces/{space}/members/{member}`` or ``spaces/{space}/members/app``. 
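Taken together, the membership request changes above all gain the same use_admin_access switch. The following sketch, which is not part of the generated diff, shows one way it combines with the documented role/type filter; the space name is a hypothetical placeholder and application default credentials with the chat.admin.memberships scopes are assumed.

from google.apps import chat_v1


def list_human_managers(space_name: str = "spaces/AAAAAAAAAAA") -> None:
    # space_name is a placeholder; substitute a real space resource name.
    client = chat_v1.ChatServiceClient()

    request = chat_v1.ListMembershipsRequest(
        parent=space_name,
        # Role and member type can be combined with AND, per the
        # ListMembershipsRequest.filter documentation above.
        filter='role = "ROLE_MANAGER" AND member.type = "HUMAN"',
        use_admin_access=True,
    )
    for membership in client.list_memberships(request=request):
        print(membership.name, membership.role)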
+ use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Deleting app memberships in a space isn't supported using + admin access. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space.py b/packages/google-apps-chat/google/apps/chat_v1/types/space.py index 46f46068321a..75456c5e5e13 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space.py @@ -33,6 +33,8 @@ "GetSpaceRequest", "FindDirectMessageRequest", "UpdateSpaceRequest", + "SearchSpacesRequest", + "SearchSpacesResponse", "DeleteSpaceRequest", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", @@ -119,6 +121,9 @@ class Space(proto.Message): Only populated in the output when ``spaceType`` is ``GROUP_CHAT`` or ``SPACE``. + last_active_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the last message in + the space. admin_installed (bool): Output only. For direct message (DM) spaces with a Chat app, whether the space was created @@ -129,6 +134,10 @@ class Space(proto.Message): To support admin install, your Chat app must feature direct messaging. + membership_count (google.apps.chat_v1.types.Space.MembershipCount): + Output only. The count of joined memberships grouped by + member type. Populated when the ``space_type`` is ``SPACE``, + ``DIRECT_MESSAGE`` or ``GROUP_CHAT``. access_settings (google.apps.chat_v1.types.Space.AccessSettings): Optional. Specifies the `access setting `__ @@ -228,6 +237,29 @@ class SpaceDetails(proto.Message): number=2, ) + class MembershipCount(proto.Message): + r"""Represents the count of memberships of a space, grouped into + categories. + + Attributes: + joined_direct_human_user_count (int): + Count of human users that have directly + joined the space, not counting users joined by + having membership in a joined group. + joined_group_count (int): + Count of all groups that have directly joined + the space. + """ + + joined_direct_human_user_count: int = proto.Field( + proto.INT32, + number=4, + ) + joined_group_count: int = proto.Field( + proto.INT32, + number=5, + ) + class AccessSettings(proto.Message): r"""Represents the `access setting `__ of the @@ -334,10 +366,20 @@ class AccessState(proto.Enum): number=17, message=timestamp_pb2.Timestamp, ) + last_active_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) admin_installed: bool = proto.Field( proto.BOOL, number=19, ) + membership_count: MembershipCount = proto.Field( + proto.MESSAGE, + number=20, + message=MembershipCount, + ) access_settings: AccessSettings = proto.Field( proto.MESSAGE, number=23, @@ -480,12 +522,27 @@ class GetSpaceRequest(proto.Message): ``spaces/{space}``. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. 
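The new Space fields are easiest to see in context. Below is a minimal read-side sketch, not part of the generated diff, assuming admin access with the chat.admin.spaces scopes and a hypothetical space name.

from google.apps import chat_v1


def describe_space(space_name: str = "spaces/AAAAAAAAAAA") -> None:
    # space_name is a placeholder; substitute a real space resource name.
    client = chat_v1.ChatServiceClient()

    request = chat_v1.GetSpaceRequest(name=space_name, use_admin_access=True)
    found = client.get_space(request=request)

    # membership_count and last_active_time are output-only fields added
    # in this change.
    print(found.display_name)
    print(found.membership_count.joined_direct_human_user_count)
    print(found.membership_count.joined_group_count)
    print(found.last_active_time)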
+ + Requires the ``chat.admin.spaces`` or + ``chat.admin.spaces.readonly`` `OAuth 2.0 + scopes `__. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class FindDirectMessageRequest(proto.Message): @@ -591,6 +648,19 @@ class UpdateSpaceRequest(proto.Message): exclusive with all other non-permission settings field paths). ``permission_settings`` is not supported with admin access. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + Some ``FieldMask`` values are not supported using admin + access. For details, see the description of ``update_mask``. """ space: "Space" = proto.Field( @@ -603,6 +673,209 @@ class UpdateSpaceRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class SearchSpacesRequest(proto.Message): + r"""Request to search for a list of spaces based on a query. + + Attributes: + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.spaces.readonly`` or + ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + This method currently only supports admin access, thus only + ``true`` is accepted for this field. + page_size (int): + The maximum number of spaces to return. The + service may return fewer than this value. + + If unspecified, at most 100 spaces are returned. + + The maximum value is 1000. If you use a value + more than 1000, it's automatically changed to + 1000. + page_token (str): + A token, received from the previous search + spaces call. Provide this parameter to retrieve + the subsequent page. + + When paginating, all other parameters provided + should match the call that provided the page + token. Passing different values to the other + parameters might lead to unexpected results. + query (str): + Required. A search query. + + You can search by using the following parameters: + + - ``create_time`` + - ``customer`` + - ``display_name`` + - ``external_user_allowed`` + - ``last_active_time`` + - ``space_history_state`` + - ``space_type`` + + ``create_time`` and ``last_active_time`` accept a timestamp + in `RFC-3339 `__ + format and the supported comparison operators are: ``=``, + ``<``, ``>``, ``<=``, ``>=``. + + ``customer`` is required and is used to indicate which + customer to fetch spaces from. ``customers/my_customer`` is + the only supported value. + + ``display_name`` only accepts the ``HAS`` (``:``) operator. + The text to match is first tokenized into tokens and each + token is prefix-matched case-insensitively and independently + as a substring anywhere in the space's ``display_name``. For + example, ``Fun Eve`` matches ``Fun event`` or + ``The evening was fun``, but not ``notFun event`` or + ``even``. + + ``external_user_allowed`` accepts either ``true`` or + ``false``. + + ``space_history_state`` only accepts values from the + [``historyState``] + (https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces#Space.HistoryState) + field of a ``space`` resource. 
+ + ``space_type`` is required and the only valid value is + ``SPACE``. + + Across different fields, only ``AND`` operators are + supported. A valid example is + ``space_type = "SPACE" AND display_name:"Hello"`` and an + invalid example is + ``space_type = "SPACE" OR display_name:"Hello"``. + + Among the same field, ``space_type`` doesn't support ``AND`` + or ``OR`` operators. ``display_name``, + 'space_history_state', and 'external_user_allowed' only + support ``OR`` operators. ``last_active_time`` and + ``create_time`` support both ``AND`` and ``OR`` operators. + ``AND`` can only be used to represent an interval, such as + ``last_active_time < "2022-01-01T00:00:00+00:00" AND last_active_time > "2023-01-01T00:00:00+00:00"``. + + The following example queries are valid: + + :: + + customer = "customers/my_customer" AND space_type = "SPACE" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + display_name:"Hello World" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (last_active_time < "2020-01-01T00:00:00+00:00" OR last_active_time > + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (display_name:"Hello World" OR display_name:"Fun event") AND + (last_active_time > "2020-01-01T00:00:00+00:00" AND last_active_time < + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (create_time > "2019-01-01T00:00:00+00:00" AND create_time < + "2020-01-01T00:00:00+00:00") AND (external_user_allowed = "true") AND + (space_history_state = "HISTORY_ON" OR space_history_state = "HISTORY_OFF") + order_by (str): + Optional. How the list of spaces is ordered. + + Supported attributes to order by are: + + - ``membership_count.joined_direct_human_user_count`` — + Denotes the count of human users that have directly + joined a space. + - ``last_active_time`` — Denotes the time when last + eligible item is added to any topic of this space. + - ``create_time`` — Denotes the time of the space creation. + + Valid ordering operation values are: + + - ``ASC`` for ascending. Default value. + + - ``DESC`` for descending. + + The supported syntax are: + + - ``membership_count.joined_direct_human_user_count DESC`` + - ``membership_count.joined_direct_human_user_count ASC`` + - ``last_active_time DESC`` + - ``last_active_time ASC`` + - ``create_time DESC`` + - ``create_time ASC`` + """ + + use_admin_access: bool = proto.Field( + proto.BOOL, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + query: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SearchSpacesResponse(proto.Message): + r"""Response with a list of spaces corresponding to the search + spaces request. + + Attributes: + spaces (MutableSequence[google.apps.chat_v1.types.Space]): + A page of the requested spaces. + next_page_token (str): + A token that can be used to retrieve the next + page. If this field is empty, there are no + subsequent pages. + total_size (int): + The total number of spaces that match the + query, across all pages. If the result is over + 10,000 spaces, this value is an estimate. 
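To tie the query grammar and order_by documentation above together, here is an illustrative request built from one of the documented example queries. It is a sketch only, not part of the generated diff, and assumes application default credentials with admin access.

from google.apps import chat_v1


def search_hello_world_spaces() -> None:
    client = chat_v1.ChatServiceClient()

    request = chat_v1.SearchSpacesRequest(
        use_admin_access=True,
        # Query taken verbatim from the documented examples above.
        query=(
            'customer = "customers/my_customer" AND space_type = "SPACE" '
            'AND display_name:"Hello World"'
        ),
        # One of the documented ordering forms.
        order_by="create_time DESC",
        page_size=100,
    )

    pager = client.search_spaces(request=request)
    for page in pager.pages:
        # total_size is reported per response and may be an estimate when
        # more than 10,000 spaces match.
        print(page.total_size)
        for result in page.spaces:
            print(result.name, result.display_name)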
+ """ + + @property + def raw_page(self): + return self + + spaces: MutableSequence["Space"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Space", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) class DeleteSpaceRequest(proto.Message): @@ -613,12 +886,26 @@ class DeleteSpaceRequest(proto.Message): Required. Resource name of the space to delete. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.delete`` `OAuth 2.0 + scope `__. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class CompleteImportSpaceRequest(proto.Message): diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py new file mode 100644 index 000000000000..3d25def75a1b --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchSpaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_SearchSpaces_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END chat_v1_generated_ChatService_SearchSpaces_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py new file mode 100644 index 000000000000..52e32c45ead5 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchSpaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_SearchSpaces_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END chat_v1_generated_ChatService_SearchSpaces_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 4442b6c5505a..ae65b2bfaefe 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.10" + "version": "0.1.0" }, "snippets": [ { @@ -3551,6 +3551,159 @@ ], "title": "chat_v1_generated_chat_service_list_spaces_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": "chat_v1_generated_chat_service_search_spaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" 
+ } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": "chat_v1_generated_chat_service_search_spaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py index 6dfa6d4d93f8..18fea55c4198 100644 --- a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py +++ b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py @@ -40,31 +40,32 @@ class chatCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'complete_import_space': ('name', ), - 'create_membership': ('parent', 'membership', ), + 'create_membership': ('parent', 'membership', 'use_admin_access', ), 'create_message': ('parent', 'message', 'thread_key', 'request_id', 'message_reply_option', 'message_id', ), 'create_reaction': ('parent', 'reaction', ), 'create_space': ('space', 'request_id', ), - 'delete_membership': ('name', ), + 'delete_membership': ('name', 'use_admin_access', ), 'delete_message': ('name', 'force', ), 'delete_reaction': ('name', ), - 'delete_space': ('name', ), + 'delete_space': ('name', 'use_admin_access', ), 'find_direct_message': ('name', ), 'get_attachment': ('name', ), - 'get_membership': ('name', ), + 'get_membership': ('name', 'use_admin_access', ), 'get_message': ('name', ), - 'get_space': ('name', ), + 'get_space': ('name', 'use_admin_access', ), 'get_space_event': ('name', ), 'get_space_read_state': ('name', ), 'get_thread_read_state': ('name', ), - 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', ), + 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', 'use_admin_access', ), 'list_messages': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'show_deleted', ), 'list_reactions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_space_events': ('parent', 'filter', 'page_size', 'page_token', ), 'list_spaces': ('page_size', 'page_token', 'filter', ), + 'search_spaces': ('query', 'use_admin_access', 'page_size', 'page_token', 'order_by', ), 'set_up_space': ('space', 'request_id', 'memberships', ), - 'update_membership': ('membership', 'update_mask', ), + 'update_membership': ('membership', 'update_mask', 'use_admin_access', ), 'update_message': ('message', 'update_mask', 'allow_missing', ), - 'update_space': ('space', 'update_mask', ), + 'update_space': ('space', 'update_mask', 'use_admin_access', ), 'update_space_read_state': ('space_read_state', 'update_mask', ), 'upload_attachment': ('parent', 'filename', ), } diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py 
b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 2d5d1309f21f..70b8272ff798 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -5289,6 +5289,428 @@ async def test_list_spaces_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + space.SearchSpacesRequest, + dict, + ], +) +def test_search_spaces(request_type, transport: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = space.SearchSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchSpacesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_search_spaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +def test_search_spaces_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.search_spaces(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + +def test_search_spaces_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc + request = {} + client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_spaces_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +@pytest.mark.asyncio +async def test_search_spaces_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.search_spaces + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.search_spaces + ] = mock_rpc + + request = {} + await client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_spaces_async( + transport: str = "grpc_asyncio", request_type=space.SearchSpacesRequest +): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = space.SearchSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchSpacesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_search_spaces_async_from_dict(): + await test_search_spaces_async(request_type=dict) + + +def test_search_spaces_pager(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.search_spaces(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + +def test_search_spaces_pages(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = list(client.search_spaces(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_spaces_async_pager(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_spaces( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, space.Space) for i in responses) + + +@pytest.mark.asyncio +async def test_search_spaces_async_pages(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_spaces(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -12682,7 +13104,12 @@ def test_create_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", }, @@ -13612,6 +14039,7 @@ def test_list_memberships_rest_required_fields( "page_token", "show_groups", "show_invited", + "use_admin_access", ) ) jsonified_request.update(unset_fields) @@ -13675,6 +14103,7 @@ def test_list_memberships_rest_unset_required_fields(): "pageToken", "showGroups", "showInvited", + "useAdminAccess", ) ) & set(("parent",)) @@ -13987,6 +14416,8 @@ def test_get_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14040,7 +14471,7 @@ def test_get_membership_rest_unset_required_fields(): ) unset_fields = transport.get_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14820,7 +15251,12 @@ def test_update_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", }, @@ -16013,7 +16449,170 @@ def test_upload_attachment_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upload_attachment_rest_interceptors(null_interceptor): +def test_upload_attachment_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_upload_attachment" + ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_upload_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = attachment.UploadAttachmentRequest.pb( + attachment.UploadAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = attachment.UploadAttachmentResponse.to_json( + attachment.UploadAttachmentResponse() + ) + + request = attachment.UploadAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attachment.UploadAttachmentResponse() + + client.upload_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upload_attachment_rest_bad_request( + transport: str = "rest", request_type=attachment.UploadAttachmentRequest +): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upload_attachment(request) + + +def test_upload_attachment_rest_error(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + space.ListSpacesRequest, + dict, + ], +) +def test_list_spaces_rest(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space.ListSpacesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space.ListSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_spaces(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSpacesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_spaces_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + + request = {} + client.list_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16026,15 +16625,13 @@ def test_upload_attachment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_upload_attachment" + transports.ChatServiceRestInterceptor, "post_list_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_upload_attachment" + transports.ChatServiceRestInterceptor, "pre_list_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = attachment.UploadAttachmentRequest.pb( - attachment.UploadAttachmentRequest() - ) + pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16045,19 +16642,19 @@ def test_upload_attachment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = attachment.UploadAttachmentResponse.to_json( - attachment.UploadAttachmentResponse() + req.return_value._content = space.ListSpacesResponse.to_json( + space.ListSpacesResponse() ) - request = attachment.UploadAttachmentRequest() + request = space.ListSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = attachment.UploadAttachmentResponse() + post.return_value = space.ListSpacesResponse() - client.upload_attachment( + client.list_spaces( request, metadata=[ ("key", "val"), @@ -16069,8 +16666,8 @@ def test_upload_attachment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_upload_attachment_rest_bad_request( - transport: str = "rest", request_type=attachment.UploadAttachmentRequest +def test_list_spaces_rest_bad_request( + transport: str = "rest", request_type=space.ListSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16078,7 +16675,7 @@ def test_upload_attachment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16090,23 +16687,78 @@ def test_upload_attachment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.upload_attachment(request) + client.list_spaces(request) -def test_upload_attachment_rest_error(): +def test_list_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_spaces(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + pages = list(client.list_spaces(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - space.ListSpacesRequest, + space.SearchSpacesRequest, dict, ], ) -def test_list_spaces_rest(request_type): +def test_search_spaces_rest(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16119,27 +16771,29 @@ def test_list_spaces_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space.ListSpacesResponse( + return_value = space.SearchSpacesResponse( next_page_token="next_page_token_value", + total_size=1086, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.ListSpacesResponse.pb(return_value) + return_value = space.SearchSpacesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_spaces(request) + response = client.search_spaces(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSpacesPager) + assert isinstance(response, pagers.SearchSpacesPager) assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 -def test_list_spaces_rest_use_cached_wrapped_rpc(): +def test_search_spaces_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16153,30 +16807,141 @@ def test_list_spaces_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_spaces in client._transport._wrapped_methods + assert client._transport.search_spaces in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc request = {} - client.list_spaces(request) + client.search_spaces(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_spaces(request) + client.search_spaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesRequest): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_by", + "page_size", + "page_token", + "query", + "use_admin_access", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = space.SearchSpacesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
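+            # The transcoded GET below, with an empty body, pushes the required
+            # ``query`` field into the HTTP query string, which the
+            # ``expected_params`` assertion at the end of this test verifies
+            # (alongside the ``$alt`` parameter).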
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = space.SearchSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_spaces(request) + + expected_params = [ + ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_spaces_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_spaces._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderBy", + "pageSize", + "pageToken", + "query", + "useAdminAccess", + ) + ) + & set(("query",)) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_spaces_rest_interceptors(null_interceptor): +def test_search_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16189,13 +16954,13 @@ def test_list_spaces_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_spaces" + transports.ChatServiceRestInterceptor, "post_search_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_spaces" + transports.ChatServiceRestInterceptor, "pre_search_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) + pb_message = space.SearchSpacesRequest.pb(space.SearchSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16206,19 +16971,19 @@ def test_list_spaces_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = space.ListSpacesResponse.to_json( - space.ListSpacesResponse() + req.return_value._content = space.SearchSpacesResponse.to_json( + space.SearchSpacesResponse() ) - request = space.ListSpacesRequest() + request = space.SearchSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.ListSpacesResponse() + post.return_value = space.SearchSpacesResponse() - client.list_spaces( + client.search_spaces( request, metadata=[ ("key", "val"), @@ -16230,8 +16995,8 @@ def test_list_spaces_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_spaces_rest_bad_request( - transport: str = "rest", request_type=space.ListSpacesRequest +def test_search_spaces_rest_bad_request( + transport: str = "rest", request_type=space.SearchSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16251,10 +17016,10 @@ def test_list_spaces_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_spaces(request) + client.search_spaces(request) -def 
test_list_spaces_rest_pager(transport: str = "rest"): +def test_search_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16266,7 +17031,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16274,17 +17039,17 @@ def test_list_spaces_rest_pager(transport: str = "rest"): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16295,7 +17060,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + response = tuple(space.SearchSpacesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -16304,13 +17069,13 @@ def test_list_spaces_rest_pager(transport: str = "rest"): sample_request = {} - pager = client.list_spaces(request=sample_request) + pager = client.search_spaces(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, space.Space) for i in results) - pages = list(client.list_spaces(request=sample_request).pages) + pages = list(client.search_spaces(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -16441,6 +17206,8 @@ def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
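+    # Any field still unset at this point must be ``use_admin_access``, the
+    # admin-access flag these updated assertions account for.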
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16494,7 +17261,7 @@ def test_get_space_rest_unset_required_fields(): ) unset_fields = transport.get_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16666,7 +17433,12 @@ def test_create_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", } @@ -17330,7 +18102,12 @@ def test_update_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", } @@ -17510,7 +18287,12 @@ def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequ credentials=ga_credentials.AnonymousCredentials() ).update_space._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17563,7 +18345,15 @@ def test_update_space_rest_unset_required_fields(): ) unset_fields = transport.update_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("space",))) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "useAdminAccess", + ) + ) + & set(("space",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -17802,6 +18592,8 @@ def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17852,7 +18644,7 @@ def test_delete_space_rest_unset_required_fields(): ) unset_fields = transport.delete_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -18707,6 +19499,8 @@ def test_create_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).create_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18762,7 +19556,7 @@ def test_create_membership_rest_unset_required_fields(): unset_fields = transport.create_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("useAdminAccess",)) & set( ( "parent", @@ -19105,7 +19899,12 @@ def test_update_membership_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).update_membership._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19159,7 +19958,12 @@ def test_update_membership_rest_unset_required_fields(): unset_fields = transport.update_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set( + ( + "updateMask", + "useAdminAccess", + ) + ) & set( ( "membership", @@ -19423,6 +20227,8 @@ def test_delete_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19476,7 +20282,7 @@ def test_delete_membership_rest_unset_required_fields(): ) unset_fields = transport.delete_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -22529,6 +23335,7 @@ def test_chat_service_base_transport(): "get_attachment", "upload_attachment", "list_spaces", + "search_spaces", "get_space", "create_space", "set_up_space", @@ -22922,6 +23729,9 @@ def test_chat_service_client_transport_session_collision(transport_name): session1 = client1.transport.list_spaces._session session2 = client2.transport.list_spaces._session assert session1 != session2 + session1 = client1.transport.search_spaces._session + session2 = client2.transport.search_spaces._session + assert session1 != session2 session1 = client1.transport.get_space._session session2 = client2.transport.get_space._session assert session1 != session2 From c03c4411287ee195fd5c99aff94d812381a908f3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:08:32 +0000 Subject: [PATCH 067/108] fix!: [google-cloud-cloudcontrolspartner] Field behavior for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed (#13085) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE fix!: [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed feat: Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` docs: A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed END_COMMIT_OVERRIDE feat: Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed feat: A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` docs: A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed PiperOrigin-RevId: 674282504 Source-Link: https://github.com/googleapis/googleapis/commit/ffbe78335c0b7efe1f0c7f44713e44fa30f7c7d9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ee0f8c5be62f917333559bded6665302ec98b5c9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNsb3VkY29udHJvbHNwYXJ0bmVyLy5Pd2xCb3QueWFtbCIsImgiOiJlZTBmOGM1YmU2MmY5MTczMzM1NTliZGVkNjY2NTMwMmVjOThiNWM5In0= BEGIN_NESTED_COMMIT fix!: [google-cloud-cloudcontrolspartner] Field behavior for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed feat: Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed feat: Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed feat: A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1.PartnerPermissions.Permission` docs: A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1.Customer` is changed PiperOrigin-RevId: 674282173 Source-Link: https://github.com/googleapis/googleapis/commit/9ebde5402abfe4014e63f3a9bb45c206a2a66f32 Source-Link: https://github.com/googleapis/googleapis-gen/commit/256435db38ff3a1d6d48b175058758b73b8d07a5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNsb3VkY29udHJvbHNwYXJ0bmVyLy5Pd2xCb3QueWFtbCIsImgiOiIyNTY0MzVkYjM4ZmYzYTFkNmQ0OGIxNzUwNTg3NThiNzNiOGQwN2E1In0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../cloud/cloudcontrolspartner_v1/types/customers.py | 10 +++++----- .../types/partner_permissions.py | 4 ++++ .../cloudcontrolspartner_v1beta/types/customers.py | 10 +++++----- .../types/partner_permissions.py | 4 ++++ 4 files changed, 18 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py index 62eee778fdaf..917b2c256294 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py @@ 
-45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py index 072b279e0861..a9c4f2513124 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py index dae25231d4f0..2237867d884f 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py @@ -45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1beta.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. 
Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py index d94dff633d35..eddc0cf9ab95 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, From 366f6f10e29a9d9cc307cbd1f16deb4decf26050 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:10:19 +0000 Subject: [PATCH 068/108] feat: [google-maps-routeoptimization] minor fields and documentation update (#13083) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: [google-maps-routeoptimization] minor fields and documentation update feat: A new message `RouteModifiers` is added feat: A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` feat: A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed docs: A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed docs: A comment for message `TimeWindow` is changed docs: A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed docs: A comment for field `reasons` in message `.google.maps.routeoptimization.v1.SkippedShipment` is changed docs: A comment for enum value `RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed docs: A comment for message `OptimizeToursValidationError` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed END_COMMIT_OVERRIDE feat: A new message `RouteModifiers` is added feat: A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` feat: A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed docs: A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed docs: A comment for message `TimeWindow` is changed docs: A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed docs: A comment for field `reasons` in message `.google.maps.routeoptimization.v1.SkippedShipment` is changed docs: A comment for enum value 
`RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed docs: A comment for message `OptimizeToursValidationError` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed PiperOrigin-RevId: 674021227 Source-Link: https://github.com/googleapis/googleapis/commit/a83432038474bbff69f79a4360a60330bf7eaaa9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/13dfeeb21d7dd16c6f80f7196e07a2a2ace35eb4 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcm91dGVvcHRpbWl6YXRpb24vLk93bEJvdC55YW1sIiwiaCI6IjEzZGZlZWIyMWQ3ZGQxNmM2ZjgwZjcxOTZlMDdhMmEyYWNlMzVlYjQifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/maps/routeoptimization/__init__.py | 2 + .../maps/routeoptimization_v1/__init__.py | 2 + .../route_optimization/async_client.py | 13 ++ .../services/route_optimization/client.py | 13 ++ .../route_optimization/transports/grpc.py | 13 ++ .../transports/grpc_asyncio.py | 13 ++ .../routeoptimization_v1/types/__init__.py | 2 + .../types/route_optimization_service.py | 122 ++++++++++++++++-- 8 files changed, 167 insertions(+), 13 deletions(-) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py index 185073f9abae..c2d0f75734bc 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/__init__.py @@ -41,6 +41,7 @@ OptimizeToursResponse, OptimizeToursValidationError, OutputConfig, + RouteModifiers, Shipment, ShipmentModel, ShipmentRoute, @@ -71,6 +72,7 @@ "OptimizeToursResponse", "OptimizeToursValidationError", "OutputConfig", + "RouteModifiers", "Shipment", "ShipmentModel", "ShipmentRoute", diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py index 053f46b064a7..6f3ede719590 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/__init__.py @@ -39,6 +39,7 @@ OptimizeToursResponse, OptimizeToursValidationError, OutputConfig, + RouteModifiers, Shipment, ShipmentModel, ShipmentRoute, @@ -69,6 +70,7 @@ "OptimizeToursResponse", "OptimizeToursValidationError", "OutputConfig", + "RouteModifiers", "RouteOptimizationClient", "Shipment", "ShipmentModel", diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py index a87f03648831..3c899f1f772a 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py @@ -410,6 +410,19 @@ async def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. 
+ + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py index ba2242e7e73f..c88ee1b4892f 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py @@ -812,6 +812,19 @@ def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. + + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py index 33cdb5684e3d..8c5621a5f0ba 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py @@ -336,6 +336,19 @@ def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. + + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. 
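+        As a rough client-side sketch (names taken from the generated
+        surface of this library; adapt as needed):
+
+        .. code-block:: python
+
+            operation = client.batch_optimize_tours(request=request)
+            # ``operation.done()`` mirrors the LRO's ``done`` field;
+            # ``operation.result()`` blocks until completion and raises if
+            # the LRO's ``error`` field is set.
+            response = operation.result()
+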
+ Returns: Callable[[~.BatchOptimizeToursRequest], ~.Operation]: diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py index 0ebbbbf4c6e8..edf1e396fe6f 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py @@ -343,6 +343,19 @@ def batch_optimize_tours( containing ``ShipmentRoute``\ s, which are a set of routes to be performed by vehicles minimizing the overall cost. + The user can poll ``operations.get`` to check the status of the + LRO: + + If the LRO's ``done`` field is false, then at least one request + is still being processed. Other requests may have completed + successfully and their results are available in GCS. + + If the LRO's ``done`` field is true, then all requests have been + processed. Any successfully processed requests will have their + results available in GCS. Any requests that failed will not have + their results available in GCS. If the LRO's ``error`` field is + set, then it contains the error from one of the failed requests. + Returns: Callable[[~.BatchOptimizeToursRequest], Awaitable[~.Operation]]: diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py index 88b09e715f9d..fbcd717dca6f 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/__init__.py @@ -30,6 +30,7 @@ OptimizeToursResponse, OptimizeToursValidationError, OutputConfig, + RouteModifiers, Shipment, ShipmentModel, ShipmentRoute, @@ -58,6 +59,7 @@ "OptimizeToursResponse", "OptimizeToursValidationError", "OutputConfig", + "RouteModifiers", "Shipment", "ShipmentModel", "ShipmentRoute", diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py index f83a834bca15..0dd1de2b1f9d 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py @@ -35,6 +35,7 @@ "Shipment", "ShipmentTypeIncompatibility", "ShipmentTypeRequirement", + "RouteModifiers", "Vehicle", "TimeWindow", "DistanceLimit", @@ -382,7 +383,8 @@ class SolvingMode(proto.Enum): Values: DEFAULT_SOLVE (0): - Solve the model. + Solve the model. Warnings may be issued in + [OptimizeToursResponse.validation_errors][google.cloud.optimization.v1.OptimizeToursResponse.validation_errors]. VALIDATE_ONLY (1): Only validates the model without solving it: populates as many @@ -531,7 +533,8 @@ class OptimizeToursResponse(proto.Message): detect independently. See the "MULTIPLE ERRORS" explanation for the [OptimizeToursValidationError][google.maps.routeoptimization.v1.OptimizeToursValidationError] - message. + message. Instead of errors, this will include warnings in + the case ``solving_mode`` is ``DEFAULT_SOLVE``. 
metrics (google.maps.routeoptimization_v1.types.OptimizeToursResponse.Metrics): Duration, distance and usage metrics for this solution. @@ -1524,6 +1527,53 @@ class RequirementMode(proto.Enum): ) +class RouteModifiers(proto.Message): + r"""Encapsulates a set of optional conditions to satisfy when + calculating vehicle routes. This is similar to ``RouteModifiers`` in + the Google Maps Platform Routes Preferred API; see: + https://developers.google.com/maps/documentation/routes/reference/rest/v2/RouteModifiers. + + Attributes: + avoid_tolls (bool): + Specifies whether to avoid toll roads where + reasonable. Preference will be given to routes + not containing toll roads. Applies only to + motorized travel modes. + avoid_highways (bool): + Specifies whether to avoid highways where + reasonable. Preference will be given to routes + not containing highways. Applies only to + motorized travel modes. + avoid_ferries (bool): + Specifies whether to avoid ferries where + reasonable. Preference will be given to routes + not containing travel by ferries. Applies only + to motorized travel modes. + avoid_indoor (bool): + Optional. Specifies whether to avoid navigating indoors + where reasonable. Preference will be given to routes not + containing indoor navigation. Applies only to the + ``WALKING`` travel mode. + """ + + avoid_tolls: bool = proto.Field( + proto.BOOL, + number=2, + ) + avoid_highways: bool = proto.Field( + proto.BOOL, + number=3, + ) + avoid_ferries: bool = proto.Field( + proto.BOOL, + number=4, + ) + avoid_indoor: bool = proto.Field( + proto.BOOL, + number=5, + ) + + class Vehicle(proto.Message): r"""Models a vehicle in a shipment problem. Solving a shipment problem will build a route starting from ``start_location`` and ending at @@ -1542,6 +1592,10 @@ class Vehicle(proto.Message): The travel mode which affects the roads usable by the vehicle and its speed. See also ``travel_duration_multiple``. + route_modifiers (google.maps.routeoptimization_v1.types.RouteModifiers): + A set of conditions to satisfy that affect + the way routes are calculated for the given + vehicle. start_location (google.type.latlng_pb2.LatLng): Geographic location where the vehicle starts before picking up any shipments. If not specified, the vehicle starts at @@ -1964,6 +2018,11 @@ class DurationLimit(proto.Message): number=1, enum=TravelMode, ) + route_modifiers: "RouteModifiers" = proto.Field( + proto.MESSAGE, + number=2, + message="RouteModifiers", + ) start_location: latlng_pb2.LatLng = proto.Field( proto.MESSAGE, number=3, @@ -2098,8 +2157,9 @@ class TimeWindow(proto.Message): :: - 0 <= `start_time` <= `soft_start_time` <= `end_time` and - 0 <= `start_time` <= `soft_end_time` <= `end_time`. + 0 <= `start_time` <= `end_time` and + 0 <= `start_time` <= `soft_start_time` and + 0 <= `soft_end_time` <= `end_time`. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -2203,6 +2263,18 @@ class DistanceLimit(proto.Message): must be nonnegative. This field is a member of `oneof`_ ``_soft_max_meters``. + cost_per_kilometer_below_soft_max (float): + Cost per kilometer incurred, increasing up to + ``soft_max_meters``, with formula: + + :: + + min(distance_meters, soft_max_meters) / 1000.0 * + cost_per_kilometer_below_soft_max. + + This cost is not supported in ``route_distance_limit``. + + This field is a member of `oneof`_ ``_cost_per_kilometer_below_soft_max``. 
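+            Illustrative values (not API defaults): with
+            ``soft_max_meters = 10000`` and
+            ``cost_per_kilometer_below_soft_max = 2.0``, a 12 km route
+            contributes min(12000, 10000) / 1000.0 * 2.0 = 20.0 to the cost,
+            while a 4 km route contributes 4000 / 1000.0 * 2.0 = 8.0.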
cost_per_kilometer_above_soft_max (float): Cost per kilometer incurred if distance is above ``soft_max_meters`` limit. The additional cost is 0 if the @@ -2229,6 +2301,11 @@ class DistanceLimit(proto.Message): number=2, optional=True, ) + cost_per_kilometer_below_soft_max: float = proto.Field( + proto.DOUBLE, + number=4, + optional=True, + ) cost_per_kilometer_above_soft_max: float = proto.Field( proto.DOUBLE, number=3, @@ -3056,7 +3133,8 @@ class SkippedShipment(proto.Message): if specified in the ``Shipment``. reasons (MutableSequence[google.maps.routeoptimization_v1.types.SkippedShipment.Reason]): A list of reasons that explain why the shipment was skipped. - See comment above ``Reason``. + See comment above ``Reason``. If we are unable to understand + why a shipment was skipped, reasons will not be set. """ class Reason(proto.Message): @@ -3115,9 +3193,7 @@ class Code(proto.Enum): Values: CODE_UNSPECIFIED (0): - This should never be used. If we are unable - to understand why a shipment was skipped, we - simply return an empty set of reasons. + This should never be used. NO_VEHICLE (1): There is no vehicle in the model making all shipments infeasible. @@ -3420,8 +3496,8 @@ class Level(proto.Enum): or before them. RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD (2): Same as ``RELAX_VISIT_TIMES_AFTER_THRESHOLD``, but the visit - sequence is also relaxed: visits remain simply bound to - their vehicle. + sequence is also relaxed: visits can only be performed by + this vehicle, but can potentially become unperformed. RELAX_ALL_AFTER_THRESHOLD (3): Same as ``RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD``, but the vehicle is also relaxed: visits are completely free @@ -3478,7 +3554,7 @@ class Level(proto.Enum): class OptimizeToursValidationError(proto.Message): - r"""Describes an error encountered when validating an + r"""Describes an error or warning encountered when validating an ``OptimizeToursRequest``. Attributes: @@ -3592,8 +3668,10 @@ class OptimizeToursValidationError(proto.Message): - TIME_WINDOW_OVERLAPPING_ADJACENT_OR_EARLIER_THAN_PREVIOUS = 2812; - TIME_WINDOW_START_TIME_AFTER_SOFT_START_TIME = 2813; - - TIME_WINDOW_SOFT_START_TIME_AFTER_END_TIME = 2814; - - TIME_WINDOW_START_TIME_AFTER_SOFT_END_TIME = 2815; + - TIME_WINDOW_SOFT_START_TIME_OUTSIDE_GLOBAL_TIME_WINDOW + = 2819; + - TIME_WINDOW_SOFT_END_TIME_OUTSIDE_GLOBAL_TIME_WINDOW = + 2820; - TIME_WINDOW_SOFT_END_TIME_AFTER_END_TIME = 2816; - TIME_WINDOW_COST_BEFORE_SOFT_START_TIME_SET_AND_MULTIPLE_WINDOWS = 2817; @@ -3754,6 +3832,15 @@ class OptimizeToursValidationError(proto.Message): - PRECEDENCE_ERROR = 46; + - PRECEDENCE_RULE_MISSING_FIRST_INDEX = 4600; + - PRECEDENCE_RULE_MISSING_SECOND_INDEX = 4601; + - PRECEDENCE_RULE_FIRST_INDEX_OUT_OF_BOUNDS = 4602; + - PRECEDENCE_RULE_SECOND_INDEX_OUT_OF_BOUNDS = 4603; + - PRECEDENCE_RULE_DUPLICATE_INDEX = 4604; + - PRECEDENCE_RULE_INEXISTENT_FIRST_VISIT_REQUEST = 4605; + - PRECEDENCE_RULE_INEXISTENT_SECOND_VISIT_REQUEST = + 4606; + - BREAK_ERROR = 48; - BREAK_RULE_EMPTY = 4800; @@ -3826,6 +3913,15 @@ class OptimizeToursValidationError(proto.Message): 5600; - DURATION_SECONDS_MATRIX_DURATION_EXCEEDS_GLOBAL_DURATION = 5601; + + - WARNING = 9; + + - WARNING_INJECTED_FIRST_SOLUTION = 90; + + - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_SHIPMENTS_REMOVED + = 9000; + - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_AFTER_GETTING_TRAVEL_TIMES + = 9001; display_name (str): The error display name. 
fields (MutableSequence[google.maps.routeoptimization_v1.types.OptimizeToursValidationError.FieldReference]): From 5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Sep 2024 01:17:57 +0000 Subject: [PATCH 069/108] feat(api): [google-cloud-netapp] A new rpc 'SwitchActiveReplicaZone' is added to service 'google.cloud.netapp.v1.NetApp' (#13086) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat(api): [google-cloud-netapp] A new rpc 'SwitchActiveReplicaZone' is added to service 'google.cloud.netapp.v1.NetApp' feat: A new message 'google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest' is added feat: A new field 'allow_auto_tiering' in message 'google.cloud.netapp.v1.StoragePool' is added feat: A new field 'cold_tier_size_gib' in message 'google.cloud.netapp.v1.Volume' is added END_COMMIT_OVERRIDE feat: A new message 'google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest' is added feat: A new field 'allow_auto_tiering' in message 'google.cloud.netapp.v1.StoragePool' is added feat: A new field 'cold_tier_size_gib' in message 'google.cloud.netapp.v1.Volume' is added PiperOrigin-RevId: 674440910 Source-Link: https://github.com/googleapis/googleapis/commit/e99de3d3cc1144ad2af19f5e142a1c59f77f2bd3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca2b9f634b559466682d84b12d0aeb9b74ebdba2 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW5ldGFwcC8uT3dsQm90LnlhbWwiLCJoIjoiY2EyYjlmNjM0YjU1OTQ2NjY4MmQ4NGIxMmQwYWViOWI3NGViZGJhMiJ9 --------- Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../google/cloud/netapp/__init__.py | 2 + .../google/cloud/netapp_v1/__init__.py | 2 + .../cloud/netapp_v1/gapic_metadata.json | 15 + .../services/net_app/async_client.py | 104 ++++ .../netapp_v1/services/net_app/client.py | 104 ++++ .../services/net_app/transports/base.py | 14 + .../services/net_app/transports/grpc.py | 29 + .../net_app/transports/grpc_asyncio.py | 35 ++ .../services/net_app/transports/rest.py | 139 +++++ .../google/cloud/netapp_v1/types/__init__.py | 2 + .../cloud/netapp_v1/types/storage_pool.py | 25 + .../google/cloud/netapp_v1/types/volume.py | 7 + ...et_app_switch_active_replica_zone_async.py | 56 ++ ...net_app_switch_active_replica_zone_sync.py | 56 ++ ...ippet_metadata_google.cloud.netapp.v1.json | 153 +++++ .../scripts/fixup_netapp_v1_keywords.py | 1 + .../unit/gapic/netapp_v1/test_net_app.py | 575 ++++++++++++++++++ 17 files changed, 1319 insertions(+) create mode 100644 packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py create mode 100644 packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py diff --git a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py index 911af7583e73..66ff5c729847 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py @@ -104,6 +104,7 @@ ListStoragePoolsRequest, ListStoragePoolsResponse, StoragePool, + SwitchActiveReplicaZoneRequest, UpdateStoragePoolRequest, ) from google.cloud.netapp_v1.types.volume import ( @@ -203,6 +204,7 @@ "ListStoragePoolsRequest", "ListStoragePoolsResponse", "StoragePool", + "SwitchActiveReplicaZoneRequest", "UpdateStoragePoolRequest", "BackupConfig", "CreateVolumeRequest", diff --git 
a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py index e55949c3cc22..6e842284502b 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py @@ -99,6 +99,7 @@ ListStoragePoolsRequest, ListStoragePoolsResponse, StoragePool, + SwitchActiveReplicaZoneRequest, UpdateStoragePoolRequest, ) from .types.volume import ( @@ -208,6 +209,7 @@ "SnapshotPolicy", "StopReplicationRequest", "StoragePool", + "SwitchActiveReplicaZoneRequest", "TieringPolicy", "TransferStats", "UpdateActiveDirectoryRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json index ca240ed3ab2f..cfe1b93cc977 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json @@ -215,6 +215,11 @@ "stop_replication" ] }, + "SwitchActiveReplicaZone": { + "methods": [ + "switch_active_replica_zone" + ] + }, "UpdateActiveDirectory": { "methods": [ "update_active_directory" @@ -475,6 +480,11 @@ "stop_replication" ] }, + "SwitchActiveReplicaZone": { + "methods": [ + "switch_active_replica_zone" + ] + }, "UpdateActiveDirectory": { "methods": [ "update_active_directory" @@ -735,6 +745,11 @@ "stop_replication" ] }, + "SwitchActiveReplicaZone": { + "methods": [ + "switch_active_replica_zone" + ] + }, "UpdateActiveDirectory": { "methods": [ "update_active_directory" diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py index a1bd9a72ca72..4a46db735693 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py @@ -941,6 +941,110 @@ async def sample_delete_storage_pool(): # Done; return the response. return response + async def switch_active_replica_zone( + self, + request: Optional[ + Union[storage_pool.SwitchActiveReplicaZoneRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""This operation will switch the active/replica zone + for a regional storagePool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest, dict]]): + The request object. SwitchActiveReplicaZoneRequest switch + the active/replica zone for a regional + storagePool. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.netapp_v1.types.StoragePool` StoragePool is a container for volumes with a service level and capacity. + Volumes can be created in a pool of sufficient + available capacity. StoragePool capacity is what you + are billed for. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_pool.SwitchActiveReplicaZoneRequest): + request = storage_pool.SwitchActiveReplicaZoneRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.switch_active_replica_zone + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + storage_pool.StoragePool, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + async def list_volumes( self, request: Optional[Union[volume.ListVolumesRequest, dict]] = None, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py index 8d8dc7fd2ef6..23ea30de2f0e 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py @@ -1537,6 +1537,110 @@ def sample_delete_storage_pool(): # Done; return the response. 
return response + def switch_active_replica_zone( + self, + request: Optional[ + Union[storage_pool.SwitchActiveReplicaZoneRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""This operation will switch the active/replica zone + for a regional storagePool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest, dict]): + The request object. SwitchActiveReplicaZoneRequest switch + the active/replica zone for a regional + storagePool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.netapp_v1.types.StoragePool` StoragePool is a container for volumes with a service level and capacity. + Volumes can be created in a pool of sufficient + available capacity. StoragePool capacity is what you + are billed for. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_pool.SwitchActiveReplicaZoneRequest): + request = storage_pool.SwitchActiveReplicaZoneRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.switch_active_replica_zone + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + storage_pool.StoragePool, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + def list_volumes( self, request: Optional[Union[volume.ListVolumesRequest, dict]] = None, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py index 19de7acc8804..0609c5169e97 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py @@ -189,6 +189,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.switch_active_replica_zone: gapic_v1.method.wrap_method( + self.switch_active_replica_zone, + default_timeout=None, + client_info=client_info, + ), self.list_volumes: gapic_v1.method.wrap_method( self.list_volumes, default_retry=retries.Retry( @@ -627,6 +632,15 @@ def delete_storage_pool( ]: raise NotImplementedError() + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_volumes( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py index 023b562b7012..37f1b85f9322 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py @@ -407,6 +407,35 @@ def delete_storage_pool( ) return self._stubs["delete_storage_pool"] + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], operations_pb2.Operation + ]: + r"""Return a callable for the switch active replica zone method over gRPC. + + This operation will switch the active/replica zone + for a regional storagePool. + + Returns: + Callable[[~.SwitchActiveReplicaZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "switch_active_replica_zone" not in self._stubs: + self._stubs["switch_active_replica_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/SwitchActiveReplicaZone", + request_serializer=storage_pool.SwitchActiveReplicaZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switch_active_replica_zone"] + @property def list_volumes( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py index 933f69b45e91..fe94f2e42c7c 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py @@ -418,6 +418,36 @@ def delete_storage_pool( ) return self._stubs["delete_storage_pool"] + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the switch active replica zone method over gRPC. + + This operation will switch the active/replica zone + for a regional storagePool. + + Returns: + Callable[[~.SwitchActiveReplicaZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "switch_active_replica_zone" not in self._stubs: + self._stubs["switch_active_replica_zone"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/SwitchActiveReplicaZone", + request_serializer=storage_pool.SwitchActiveReplicaZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switch_active_replica_zone"] + @property def list_volumes( self, @@ -1753,6 +1783,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.switch_active_replica_zone: gapic_v1.method_async.wrap_method( + self.switch_active_replica_zone, + default_timeout=None, + client_info=client_info, + ), self.list_volumes: gapic_v1.method_async.wrap_method( self.list_volumes, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py index 0a163bc67ea5..da0a85a7bcd9 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py @@ -416,6 +416,14 @@ def post_stop_replication(self, response): logging.log(f"Received response: {response}") return response + def pre_switch_active_replica_zone(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_switch_active_replica_zone(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_active_directory(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -1419,6 +1427,29 @@ def post_stop_replication( """ return response + def pre_switch_active_replica_zone( + self, + request: 
storage_pool.SwitchActiveReplicaZoneRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storage_pool.SwitchActiveReplicaZoneRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for switch_active_replica_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_switch_active_replica_zone( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for switch_active_replica_zone + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + def pre_update_active_directory( self, request: gcn_active_directory.UpdateActiveDirectoryRequest, @@ -5680,6 +5711,104 @@ def __call__( resp = self._interceptor.post_stop_replication(resp) return resp + class _SwitchActiveReplicaZone(NetAppRestStub): + def __hash__(self): + return hash("SwitchActiveReplicaZone") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storage_pool.SwitchActiveReplicaZoneRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the switch active replica + zone method over HTTP. + + Args: + request (~.storage_pool.SwitchActiveReplicaZoneRequest): + The request object. SwitchActiveReplicaZoneRequest switch + the active/replica zone for a regional + storagePool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/storagePools/*}:switch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_switch_active_replica_zone( + request, metadata + ) + pb_request = storage_pool.SwitchActiveReplicaZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_switch_active_replica_zone(resp) + return resp + class _UpdateActiveDirectory(NetAppRestStub): def __hash__(self): return hash("UpdateActiveDirectory") @@ -7004,6 +7133,16 @@ def stop_replication( # In C++ this would require a dynamic_cast return self._StopReplication(self._session, self._host, self._interceptor) # type: ignore + @property + def switch_active_replica_zone( + self, + ) -> Callable[ + [storage_pool.SwitchActiveReplicaZoneRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SwitchActiveReplicaZone(self._session, self._host, self._interceptor) # type: ignore + @property def update_active_directory( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py index fbdb02042fae..c06102732661 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py @@ -93,6 +93,7 @@ ListStoragePoolsRequest, ListStoragePoolsResponse, StoragePool, + SwitchActiveReplicaZoneRequest, UpdateStoragePoolRequest, ) from .volume import ( @@ -190,6 +191,7 @@ "ListStoragePoolsRequest", "ListStoragePoolsResponse", "StoragePool", + "SwitchActiveReplicaZoneRequest", "UpdateStoragePoolRequest", "BackupConfig", "CreateVolumeRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py index e9687e66b7b4..f7815be021ac 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py @@ -32,6 +32,7 @@ "CreateStoragePoolRequest", "UpdateStoragePoolRequest", "DeleteStoragePoolRequest", + "SwitchActiveReplicaZoneRequest", "StoragePool", }, ) @@ -199,6 +200,21 @@ class DeleteStoragePoolRequest(proto.Message): ) +class SwitchActiveReplicaZoneRequest(proto.Message): + r"""SwitchActiveReplicaZoneRequest switch the active/replica zone + for a regional storagePool. + + Attributes: + name (str): + Required. Name of the storage pool + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class StoragePool(proto.Message): r"""StoragePool is a container for volumes with a service level and capacity. Volumes can be created in a pool of sufficient @@ -255,6 +271,11 @@ class StoragePool(proto.Message): AD or DNS server from other regions. This field is a member of `oneof`_ ``_global_access_allowed``. + allow_auto_tiering (bool): + Optional. True if the storage pool supports + Auto Tiering enabled volumes. Default is false. + Auto-tiering can be enabled after storage pool + creation but it can't be disabled once enabled. replica_zone (str): Optional. Specifies the replica zone for regional storagePool. 
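The new ``allow_auto_tiering`` field documented above can be enabled after a pool exists but never disabled again, so it will typically be set through an update call. The sketch below is illustrative only and is not part of this patch or the generated samples; it assumes the existing ``update_storage_pool`` long-running RPC with a ``storage_pool``/``update_mask`` pair, and the pool name is a placeholder.

.. code-block:: python

    # Illustrative sketch, not generated sample code. Assumes the existing
    # update_storage_pool long-running RPC with a storage_pool/update_mask
    # pair; the pool name is a placeholder.
    from google.cloud import netapp_v1
    from google.protobuf import field_mask_pb2


    def enable_auto_tiering(pool_name: str) -> netapp_v1.StoragePool:
        client = netapp_v1.NetAppClient()

        # Auto tiering can be enabled after storage pool creation, but it
        # cannot be disabled once enabled.
        storage_pool = netapp_v1.StoragePool(
            name=pool_name,
            allow_auto_tiering=True,
        )
        request = netapp_v1.UpdateStoragePoolRequest(
            storage_pool=storage_pool,
            update_mask=field_mask_pb2.FieldMask(paths=["allow_auto_tiering"]),
        )

        operation = client.update_storage_pool(request=request)
        return operation.result()  # blocks until the long-running operation completes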
@@ -367,6 +388,10 @@ class State(proto.Enum): number=17, optional=True, ) + allow_auto_tiering: bool = proto.Field( + proto.BOOL, + number=18, + ) replica_zone: str = proto.Field( proto.STRING, number=20, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py index a10bd13bb922..2589c8149e04 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py @@ -466,6 +466,9 @@ class Volume(proto.Message): zone (str): Output only. Specifies the active zone for regional volume. + cold_tier_size_gib (int): + Output only. Size of the volume cold tier + data in GiB. """ class State(proto.Enum): @@ -659,6 +662,10 @@ class State(proto.Enum): proto.STRING, number=37, ) + cold_tier_size_gib: int = proto.Field( + proto.INT64, + number=39, + ) class ExportPolicy(proto.Message): diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py new file mode 100644 index 000000000000..bb3bed5a2e23 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SwitchActiveReplicaZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_SwitchActiveReplicaZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_SwitchActiveReplicaZone_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py new file mode 100644 index 000000000000..cf4ce4ac46c5 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_switch_active_replica_zone_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SwitchActiveReplicaZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_SwitchActiveReplicaZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_switch_active_replica_zone(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.switch_active_replica_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_SwitchActiveReplicaZone_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json index 3edfdb18757c..a43c86575bc2 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json +++ b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json @@ -6716,6 +6716,159 @@ ], "title": "netapp_v1_generated_net_app_stop_replication_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.switch_active_replica_zone", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.SwitchActiveReplicaZone", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "SwitchActiveReplicaZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "switch_active_replica_zone" + }, + "description": "Sample for SwitchActiveReplicaZone", + "file": "netapp_v1_generated_net_app_switch_active_replica_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_SwitchActiveReplicaZone_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_switch_active_replica_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.switch_active_replica_zone", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.SwitchActiveReplicaZone", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "SwitchActiveReplicaZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.SwitchActiveReplicaZoneRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "switch_active_replica_zone" + }, + "description": "Sample for SwitchActiveReplicaZone", + "file": "netapp_v1_generated_net_app_switch_active_replica_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_SwitchActiveReplicaZone_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_switch_active_replica_zone_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py b/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py index 3a50d5244cd5..c317695a165e 100644 --- a/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py +++ b/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py @@ -80,6 +80,7 @@ class netappCallTransformer(cst.CSTTransformer): 'reverse_replication_direction': ('name', ), 'revert_volume': ('name', 'snapshot_id', ), 'stop_replication': ('name', 'force', ), + 'switch_active_replica_zone': ('name', ), 'update_active_directory': ('update_mask', 'active_directory', ), 'update_backup': ('update_mask', 'backup', ), 'update_backup_policy': ('update_mask', 'backup_policy', ), diff --git a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index 0465f736de27..0fbc5b0e378e 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -2128,6 +2128,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -2156,6 +2157,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): assert response.psa_range == "psa_range_value" assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED assert response.global_access_allowed is True + assert response.allow_auto_tiering is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" @@ -2273,6 +2275,7 @@ async def test_get_storage_pool_empty_call_async(): psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -2358,6 +2361,7 @@ async def test_get_storage_pool_async( psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -2387,6 +2391,7 @@ async def test_get_storage_pool_async( assert response.psa_range == "psa_range_value" assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED assert response.global_access_allowed 
is True + assert response.allow_auto_tiering is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" @@ -3324,6 +3329,311 @@ async def test_delete_storage_pool_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + storage_pool.SwitchActiveReplicaZoneRequest, + dict, + ], +) +def test_switch_active_replica_zone(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = storage_pool.SwitchActiveReplicaZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_switch_active_replica_zone_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.switch_active_replica_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_pool.SwitchActiveReplicaZoneRequest() + + +def test_switch_active_replica_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = storage_pool.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.switch_active_replica_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_pool.SwitchActiveReplicaZoneRequest( + name="name_value", + ) + + +def test_switch_active_replica_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.switch_active_replica_zone + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.switch_active_replica_zone + ] = mock_rpc + request = {} + client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.switch_active_replica_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.switch_active_replica_zone() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_pool.SwitchActiveReplicaZoneRequest() + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.switch_active_replica_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.switch_active_replica_zone + ] = mock_rpc + + request = {} + await client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.switch_active_replica_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_async( + transport: str = "grpc_asyncio", + request_type=storage_pool.SwitchActiveReplicaZoneRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = storage_pool.SwitchActiveReplicaZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_async_from_dict(): + await test_switch_active_replica_zone_async(request_type=dict) + + +def test_switch_active_replica_zone_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = storage_pool.SwitchActiveReplicaZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_switch_active_replica_zone_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_pool.SwitchActiveReplicaZoneRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.switch_active_replica_zone), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -3943,6 +4253,7 @@ def test_get_volume(request_type, transport: str = "grpc"): multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) response = client.get_volume(request) @@ -3982,6 +4293,7 @@ def test_get_volume(request_type, transport: str = "grpc"): assert response.multiple_endpoints is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" + assert response.cold_tier_size_gib == 1888 def test_get_volume_empty_call(): @@ -4108,6 +4420,7 @@ async def test_get_volume_empty_call_async(): multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) ) response = await client.get_volume() @@ -4202,6 +4515,7 @@ async def test_get_volume_async( multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) ) response = await client.get_volume(request) @@ -4242,6 +4556,7 @@ async def test_get_volume_async( assert response.multiple_endpoints is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" + assert response.cold_tier_size_gib == 1888 @pytest.mark.asyncio @@ -22924,6 +23239,7 @@ def test_create_storage_pool_rest(request_type): "psa_range": "psa_range_value", "encryption_type": 1, "global_access_allowed": True, + "allow_auto_tiering": True, "replica_zone": "replica_zone_value", "zone": "zone_value", } @@ -23345,6 +23661,7 @@ def test_get_storage_pool_rest(request_type): psa_range="psa_range_value", encryption_type=common.EncryptionType.SERVICE_MANAGED, global_access_allowed=True, + allow_auto_tiering=True, replica_zone="replica_zone_value", zone="zone_value", ) @@ -23377,6 +23694,7 @@ def 
test_get_storage_pool_rest(request_type): assert response.psa_range == "psa_range_value" assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED assert response.global_access_allowed is True + assert response.allow_auto_tiering is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" @@ -23683,6 +24001,7 @@ def test_update_storage_pool_rest(request_type): "psa_range": "psa_range_value", "encryption_type": 1, "global_access_allowed": True, + "allow_auto_tiering": True, "replica_zone": "replica_zone_value", "zone": "zone_value", } @@ -24358,6 +24677,254 @@ def test_delete_storage_pool_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + storage_pool.SwitchActiveReplicaZoneRequest, + dict, + ], +) +def test_switch_active_replica_zone_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.switch_active_replica_zone(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_switch_active_replica_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.switch_active_replica_zone + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.switch_active_replica_zone + ] = mock_rpc + + request = {} + client.switch_active_replica_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.switch_active_replica_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_switch_active_replica_zone_rest_required_fields( + request_type=storage_pool.SwitchActiveReplicaZoneRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).switch_active_replica_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.switch_active_replica_zone(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_switch_active_replica_zone_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.switch_active_replica_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_switch_active_replica_zone_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_switch_active_replica_zone" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_switch_active_replica_zone" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = storage_pool.SwitchActiveReplicaZoneRequest.pb( + storage_pool.SwitchActiveReplicaZoneRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = storage_pool.SwitchActiveReplicaZoneRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.switch_active_replica_zone( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_switch_active_replica_zone_rest_bad_request( + transport: str = "rest", request_type=storage_pool.SwitchActiveReplicaZoneRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.switch_active_replica_zone(request) + + +def test_switch_active_replica_zone_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -24779,6 +25346,7 @@ def test_get_volume_rest(request_type): multiple_endpoints=True, replica_zone="replica_zone_value", zone="zone_value", + cold_tier_size_gib=1888, ) # Wrap the value into a proper Response obj @@ -24822,6 +25390,7 @@ def test_get_volume_rest(request_type): assert response.multiple_endpoints is True assert response.replica_zone == "replica_zone_value" assert response.zone == "zone_value" + assert response.cold_tier_size_gib == 1888 def test_get_volume_rest_use_cached_wrapped_rpc(): @@ -25184,6 +25753,7 @@ def test_create_volume_rest(request_type): "tiering_policy": {"tier_action": 1, "cooling_threshold_days": 2343}, "replica_zone": "replica_zone_value", "zone": "zone_value", + "cold_tier_size_gib": 1888, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -25668,6 +26238,7 @@ def test_update_volume_rest(request_type): "tiering_policy": {"tier_action": 1, "cooling_threshold_days": 2343}, "replica_zone": "replica_zone_value", "zone": "zone_value", + "cold_tier_size_gib": 1888, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -40680,6 +41251,7 @@ def test_net_app_base_transport(): "get_storage_pool", "update_storage_pool", "delete_storage_pool", + "switch_active_replica_zone", "list_volumes", "get_volume", "create_volume", @@ -41024,6 +41596,9 @@ def test_net_app_client_transport_session_collision(transport_name): session1 = client1.transport.delete_storage_pool._session session2 = client2.transport.delete_storage_pool._session assert session1 != session2 + session1 = client1.transport.switch_active_replica_zone._session + session2 = client2.transport.switch_active_replica_zone._session + assert session1 != session2 session1 = client1.transport.list_volumes._session session2 = client2.transport.list_volumes._session assert session1 != session2 From 35b2c456c6791bc47ffe894f3ef966558cb6c98e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 10:56:49 -0700 Subject: [PATCH 070/108] docs: [google-cloud-asset] Comments are clarified for certain fields in messages `QueryAssetsResponse` and `ResourceSearchResult` (#13076) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 673500457 Source-Link: https://github.com/googleapis/googleapis/commit/c27097ea636b7b2699f1a1c9c6bf3fb66ff8a789 Source-Link: https://github.com/googleapis/googleapis-gen/commit/dd6d5ed8b59764109b996ba6895dd10be1c8b865 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWFzc2V0Ly5Pd2xCb3QueWFtbCIsImgiOiJkZDZkNWVkOGI1OTc2NDEwOWI5OTZiYTY4OTVkZDEwYmUxYzhiODY1In0= --------- Co-authored-by: Owl Bot --- .../google/cloud/asset_v1/types/asset_service.py | 14 ++++++++------ .../google/cloud/asset_v1/types/assets.py | 6 +++--- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py index 88d2a833272d..1d09f9e767c6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py @@ -2780,9 +2780,11 @@ class QueryAssetsResponse(proto.Message): valid ``response``. If ``done`` == ``false`` and the query result is being saved - in a output, the output_config field will be set. If + in an output, the output_config field will be set. If ``done`` == ``true``, exactly one of ``error``, - ``query_result`` or ``output_config`` will be set. + ``query_result`` or ``output_config`` will be set. [done] is + unset unless the [QueryAssetsResponse] contains a + [QueryAssetsResponse.job_reference]. error (google.rpc.status_pb2.Status): Error status. @@ -2792,10 +2794,10 @@ class QueryAssetsResponse(proto.Message): This field is a member of `oneof`_ ``response``. output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): - Output configuration which indicates instead - of being returned in API response on the fly, - the query result will be saved in a specific - output. + Output configuration, which indicates that + instead of being returned in an API response on + the fly, the query result will be saved in a + specific output. This field is a member of `oneof`_ ``response``. """ diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py index da13dc114c9b..d31228b6e04c 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py @@ -602,7 +602,7 @@ class EffectiveTagDetails(proto.Message): class ResourceSearchResult(proto.Message): r"""A result of Resource Search, containing information of a - cloud resource. Next ID: 34 + cloud resource. Attributes: name (str): @@ -684,8 +684,8 @@ class ResourceSearchResult(proto.Message): - Use a field query. Example: ``location:us-west*`` - Use a free text query. Example: ``us-west*`` labels (MutableMapping[str, str]): - Labels associated with this resource. See `Labelling and - grouping Google Cloud + User labels associated with this resource. See `Labelling + and grouping Google Cloud resources `__ for more information. This field is available only when the resource's Protobuf contains it. 
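The clarified ``done`` semantics above imply a simple polling pattern: when a query outlives the request timeout, re-issue the call with the returned ``job_reference`` until ``done`` is true, then read exactly one of ``error``, ``query_result`` or ``output_config``. A minimal sketch of that pattern follows; it is illustrative only and not part of this change, and the parent scope and SQL statement are placeholders.

.. code-block:: python

    # Illustrative sketch, not part of this change. Uses the existing
    # QueryAssets RPC; the parent scope and statement are placeholders.
    import time

    from google.cloud import asset_v1


    def run_query(parent: str, statement: str):
        client = asset_v1.AssetServiceClient()

        response = client.query_assets(
            request=asset_v1.QueryAssetsRequest(parent=parent, statement=statement)
        )

        # done is unset unless the response carries a job_reference, so only
        # poll when one was returned.
        while response.job_reference and not response.done:
            time.sleep(5)
            response = client.query_assets(
                request=asset_v1.QueryAssetsRequest(
                    parent=parent, job_reference=response.job_reference
                )
            )

        # Once done, exactly one of error, query_result or output_config is set.
        if response.error.code:
            raise RuntimeError(f"QueryAssets failed: {response.error.message}")
        if response.query_result.rows:
            return list(response.query_result.rows)
        # Otherwise the results were saved to the configured output.
        return response.output_config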
From 8fc8b25192eead473cf590b372d352a71634c8f2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 16 Sep 2024 14:31:32 -0400 Subject: [PATCH 071/108] build: release script update (#13089) Towards b/366438331 --- .kokoro/release-single.sh | 2 +- .kokoro/release/common.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.kokoro/release-single.sh b/.kokoro/release-single.sh index 5665c4828a93..f917f8ef66d0 100755 --- a/.kokoro/release-single.sh +++ b/.kokoro/release-single.sh @@ -21,7 +21,7 @@ set -eo pipefail pwd # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index c9b8a36f766d..830be65dde19 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } From 33834de6d9eeced6da30f3fcbeb4e1029e07cf18 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 15:19:57 -0400 Subject: [PATCH 072/108] feat: [google-cloud-gke-multicloud] An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added (#13088) BEGIN_COMMIT_OVERRIDE feat: An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added feat: An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
feat: An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added PiperOrigin-RevId: 674451558 Source-Link: https://github.com/googleapis/googleapis/commit/31df26d0ff3193117fbf9d6dd25280dc3f8ca978 Source-Link: https://github.com/googleapis/googleapis-gen/commit/23e655713582360b568172348be5d883caa40efb Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdrZS1tdWx0aWNsb3VkLy5Pd2xCb3QueWFtbCIsImgiOiIyM2U2NTU3MTM1ODIzNjBiNTY4MTcyMzQ4YmU1ZDg4M2NhYTQwZWZiIn0= --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/gke_multicloud/__init__.py | 4 + .../cloud/gke_multicloud_v1/__init__.py | 4 + .../attached_clusters/async_client.py | 1 + .../services/attached_clusters/client.py | 1 + .../cloud/gke_multicloud_v1/types/__init__.py | 4 + .../types/attached_resources.py | 8 ++ .../types/attached_service.py | 3 +- .../gke_multicloud_v1/types/aws_resources.py | 9 +- .../types/azure_resources.py | 2 +- .../types/common_resources.py | 127 ++++++++++++++++++ .../test_attached_clusters.py | 2 + .../gke_multicloud_v1/test_aws_clusters.py | 14 ++ 12 files changed, 176 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py index 718abad6aa06..bd6a6e31b887 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py @@ -175,8 +175,10 @@ ManagedPrometheusConfig, MaxPodsConstraint, MonitoringConfig, + NodeKubeletConfig, NodeTaint, OperationMetadata, + SecurityPostureConfig, WorkloadIdentityConfig, ) @@ -313,7 +315,9 @@ "ManagedPrometheusConfig", "MaxPodsConstraint", "MonitoringConfig", + "NodeKubeletConfig", "NodeTaint", "OperationMetadata", + "SecurityPostureConfig", "WorkloadIdentityConfig", ) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py index e6ae2f8a5e9c..88648899fa1c 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py @@ -163,8 +163,10 @@ ManagedPrometheusConfig, MaxPodsConstraint, MonitoringConfig, + NodeKubeletConfig, NodeTaint, OperationMetadata, + SecurityPostureConfig, WorkloadIdentityConfig, ) @@ -291,10 +293,12 @@ "ManagedPrometheusConfig", "MaxPodsConstraint", "MonitoringConfig", + "NodeKubeletConfig", "NodeTaint", "OperationMetadata", "ReplicaPlacement", "RollbackAwsNodePoolUpdateRequest", + "SecurityPostureConfig", "SpotConfig", "SurgeSettings", "UpdateAttachedClusterRequest", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py index 47e8caee062a..60c78caa006a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py @@ -533,6 +533,7 @@ async def sample_update_attached_cluster(): - ``platform_version``. - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. 
+ - ``security_posture_config.vulnerability_mode`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py index a380b95dfcf7..b8c138ceaef6 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py @@ -966,6 +966,7 @@ def sample_update_attached_cluster(): - ``platform_version``. - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. + - ``security_posture_config.vulnerability_mode`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py index 664570793e2b..1f7c9d3c38e8 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py @@ -152,8 +152,10 @@ ManagedPrometheusConfig, MaxPodsConstraint, MonitoringConfig, + NodeKubeletConfig, NodeTaint, OperationMetadata, + SecurityPostureConfig, WorkloadIdentityConfig, ) @@ -284,7 +286,9 @@ "ManagedPrometheusConfig", "MaxPodsConstraint", "MonitoringConfig", + "NodeKubeletConfig", "NodeTaint", "OperationMetadata", + "SecurityPostureConfig", "WorkloadIdentityConfig", ) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py index 253db740d172..de4ea8f3bddc 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py @@ -134,6 +134,9 @@ class AttachedCluster(proto.Message): binary_authorization (google.cloud.gke_multicloud_v1.types.BinaryAuthorization): Optional. Binary Authorization configuration for this cluster. + security_posture_config (google.cloud.gke_multicloud_v1.types.SecurityPostureConfig): + Optional. Security Posture configuration for + this cluster. """ class State(proto.Enum): @@ -272,6 +275,11 @@ class State(proto.Enum): number=25, message=common_resources.BinaryAuthorization, ) + security_posture_config: common_resources.SecurityPostureConfig = proto.Field( + proto.MESSAGE, + number=26, + message=common_resources.SecurityPostureConfig, + ) class AttachedClustersAuthorization(proto.Message): diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py index e5a54e3562c5..e0791affdff6 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py @@ -211,7 +211,7 @@ class ImportAttachedClusterRequest(proto.Message): Required. The Kubernetes distribution of the underlying attached cluster. - Supported values: ["eks", "aks"]. 
+ Supported values: ["eks", "aks", "generic"]. proxy_config (google.cloud.gke_multicloud_v1.types.AttachedProxyConfig): Optional. Proxy configuration for outbound HTTP(S) traffic. @@ -272,6 +272,7 @@ class UpdateAttachedClusterRequest(proto.Message): - ``platform_version``. - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. + - ``security_posture_config.vulnerability_mode`` """ attached_cluster: attached_resources.AttachedCluster = proto.Field( diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py index 73fbfa3225ab..2943000087e5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py @@ -735,6 +735,8 @@ class AwsNodePool(proto.Message): management (google.cloud.gke_multicloud_v1.types.AwsNodeManagement): Optional. The Management configuration for this node pool. + kubelet_config (google.cloud.gke_multicloud_v1.types.NodeKubeletConfig): + Optional. Node kubelet configs. update_settings (google.cloud.gke_multicloud_v1.types.UpdateSettings): Optional. Update settings control the speed and disruption of the update. @@ -843,6 +845,11 @@ class State(proto.Enum): number=30, message="AwsNodeManagement", ) + kubelet_config: common_resources.NodeKubeletConfig = proto.Field( + proto.MESSAGE, + number=31, + message=common_resources.NodeKubeletConfig, + ) update_settings: "UpdateSettings" = proto.Field( proto.MESSAGE, number=32, @@ -1279,7 +1286,7 @@ class AwsProxyConfig(proto.Message): The secret must be a JSON encoded proxy configuration as described in - https://cloud.google.com/anthos/clusters/docs/multi-cloud/aws/how-to/use-a-proxy#create_a_proxy_configuration_file + https://cloud.google.com/kubernetes-engine/multi-cloud/docs/aws/how-to/use-a-proxy#create_a_proxy_configuration_file secret_version (str): The version string of the AWS Secret Manager secret that contains the HTTP(S) proxy diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py index a6d17925f782..597c6e78f78a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py @@ -557,7 +557,7 @@ class AzureProxyConfig(proto.Message): The secret must be a JSON encoded proxy configuration as described in - https://cloud.google.com/anthos/clusters/docs/multi-cloud/azure/how-to/use-a-proxy#create_a_proxy_configuration_file + https://cloud.google.com/kubernetes-engine/multi-cloud/docs/azure/how-to/use-a-proxy#create_a_proxy_configuration_file Secret ids are formatted as ``https://.vault.azure.net/secrets//``. 
diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py index 9915db5dcc2b..58f053bff12c 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py @@ -28,12 +28,14 @@ "MaxPodsConstraint", "OperationMetadata", "NodeTaint", + "NodeKubeletConfig", "Fleet", "LoggingConfig", "LoggingComponentConfig", "MonitoringConfig", "ManagedPrometheusConfig", "BinaryAuthorization", + "SecurityPostureConfig", }, ) @@ -263,6 +265,96 @@ class Effect(proto.Enum): ) +class NodeKubeletConfig(proto.Message): + r"""Configuration for node pool kubelet options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + insecure_kubelet_readonly_port_enabled (bool): + Optional. Enable the insecure kubelet read + only port. + cpu_manager_policy (str): + Optional. Control the CPU management policy on the node. See + https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/ + + The following values are allowed. + + - "none": the default, which represents the existing + scheduling behavior. + - "static": allows pods with certain resource + characteristics to be granted increased CPU affinity and + exclusivity on the node. The default value is 'none' if + unspecified. + + This field is a member of `oneof`_ ``_cpu_manager_policy``. + cpu_cfs_quota (bool): + Optional. Enable CPU CFS quota enforcement + for containers that specify CPU limits. + + This option is enabled by default which makes + kubelet use CFS quota + (https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt) + to enforce container CPU limits. Otherwise, CPU + limits will not be enforced at all. + + Disable this option to mitigate CPU throttling + problems while still having your pods to be in + Guaranteed QoS class by specifying the CPU + limits. + + The default value is 'true' if unspecified. + + This field is a member of `oneof`_ ``_cpu_cfs_quota``. + cpu_cfs_quota_period (str): + Optional. Set the CPU CFS quota period value + 'cpu.cfs_period_us'. + + The string must be a sequence of decimal numbers, each with + optional fraction and a unit suffix, such as "300ms". Valid + time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". + The value must be a positive duration. + + The default value is '100ms' if unspecified. + + This field is a member of `oneof`_ ``_cpu_cfs_quota_period``. + pod_pids_limit (int): + Optional. Set the Pod PID limits. See + https://kubernetes.io/docs/concepts/policy/pid-limiting/#pod-pid-limits + + Controls the maximum number of processes allowed + to run in a pod. The value must be greater than + or equal to 1024 and less than 4194304. + + This field is a member of `oneof`_ ``_pod_pids_limit``. + """ + + insecure_kubelet_readonly_port_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cpu_manager_policy: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + cpu_cfs_quota: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + cpu_cfs_quota_period: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + pod_pids_limit: int = proto.Field( + proto.INT64, + number=5, + optional=True, + ) + + class Fleet(proto.Message): r"""Fleet related configuration. 
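Note: for orientation, a minimal sketch (not part of the generated patch) of how the new `NodeKubeletConfig` message added above could be populated and attached to an `AwsNodePool` through the new `kubelet_config` field. The resource name and option values are hypothetical placeholders; the types are the ones exported from `google.cloud.gke_multicloud_v1` in this change.

```python
from google.cloud import gke_multicloud_v1

# Hypothetical values; valid ranges and choices are documented in the
# NodeKubeletConfig docstring above.
kubelet_config = gke_multicloud_v1.NodeKubeletConfig(
    insecure_kubelet_readonly_port_enabled=False,
    cpu_manager_policy="static",   # "none" (default) or "static"
    cpu_cfs_quota=True,            # enforce container CPU limits via CFS quota
    cpu_cfs_quota_period="100ms",  # duration string such as "100ms"
    pod_pids_limit=4096,           # must be >= 1024 and < 4194304
)

node_pool = gke_multicloud_v1.AwsNodePool(
    name="projects/PROJECT/locations/LOCATION/awsClusters/CLUSTER/awsNodePools/POOL",
    kubelet_config=kubelet_config,
)
```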
@@ -414,4 +506,39 @@ class EvaluationMode(proto.Enum): ) +class SecurityPostureConfig(proto.Message): + r"""SecurityPostureConfig defines the flags needed to + enable/disable features for the Security Posture API. + + Attributes: + vulnerability_mode (google.cloud.gke_multicloud_v1.types.SecurityPostureConfig.VulnerabilityMode): + Sets which mode to use for vulnerability + scanning. + """ + + class VulnerabilityMode(proto.Enum): + r"""VulnerabilityMode defines enablement mode for vulnerability + scanning. + + Values: + VULNERABILITY_MODE_UNSPECIFIED (0): + Default value not specified. + VULNERABILITY_DISABLED (1): + Disables vulnerability scanning on the + cluster. + VULNERABILITY_ENTERPRISE (2): + Applies the Security Posture's vulnerability + on cluster Enterprise level features. + """ + VULNERABILITY_MODE_UNSPECIFIED = 0 + VULNERABILITY_DISABLED = 1 + VULNERABILITY_ENTERPRISE = 2 + + vulnerability_mode: VulnerabilityMode = proto.Field( + proto.ENUM, + number=1, + enum=VulnerabilityMode, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py index a438c01e8714..1aeeadddb0c6 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py @@ -4992,6 +4992,7 @@ def test_create_attached_cluster_rest(request_type): "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} }, "binary_authorization": {"evaluation_mode": 1}, + "security_posture_config": {"vulnerability_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5440,6 +5441,7 @@ def test_update_attached_cluster_rest(request_type): "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} }, "binary_authorization": {"evaluation_mode": 1}, + "security_posture_config": {"vulnerability_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
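Note: similarly, a minimal sketch (not part of the generated patch) of enabling enterprise vulnerability scanning through the new `security_posture_config` field and the newly supported `security_posture_config.vulnerability_mode` mask path. It assumes the flattened `update_attached_cluster(attached_cluster=..., update_mask=...)` signature shown in the generated client above; the cluster name is a placeholder.

```python
from google.cloud import gke_multicloud_v1
from google.protobuf import field_mask_pb2

client = gke_multicloud_v1.AttachedClustersClient()

cluster = gke_multicloud_v1.AttachedCluster(
    name="projects/PROJECT/locations/LOCATION/attachedClusters/CLUSTER",
    security_posture_config=gke_multicloud_v1.SecurityPostureConfig(
        vulnerability_mode=(
            gke_multicloud_v1.SecurityPostureConfig.VulnerabilityMode.VULNERABILITY_ENTERPRISE
        ),
    ),
)

# Only the newly supported mask path is sent; other cluster fields stay untouched.
operation = client.update_attached_cluster(
    attached_cluster=cluster,
    update_mask=field_mask_pb2.FieldMask(
        paths=["security_posture_config.vulnerability_mode"]
    ),
)
updated = operation.result()  # long-running operation returning the AttachedCluster
```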
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py index 87fa3f604e12..f9a695942606 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py @@ -10124,6 +10124,13 @@ def test_create_aws_node_pool_rest(request_type): "max_pods_constraint": {"max_pods_per_node": 1798}, "errors": [{"message": "message_value"}], "management": {"auto_repair": True}, + "kubelet_config": { + "insecure_kubelet_readonly_port_enabled": True, + "cpu_manager_policy": "cpu_manager_policy_value", + "cpu_cfs_quota": True, + "cpu_cfs_quota_period": "cpu_cfs_quota_period_value", + "pod_pids_limit": 1488, + }, "update_settings": { "surge_settings": {"max_surge": 971, "max_unavailable": 1577} }, @@ -10590,6 +10597,13 @@ def test_update_aws_node_pool_rest(request_type): "max_pods_constraint": {"max_pods_per_node": 1798}, "errors": [{"message": "message_value"}], "management": {"auto_repair": True}, + "kubelet_config": { + "insecure_kubelet_readonly_port_enabled": True, + "cpu_manager_policy": "cpu_manager_policy_value", + "cpu_cfs_quota": True, + "cpu_cfs_quota_period": "cpu_cfs_quota_period_value", + "pod_pids_limit": 1488, + }, "update_settings": { "surge_settings": {"max_surge": 971, "max_unavailable": 1577} }, From a032b1147b93281760525a161b9f2437e8d7aeba Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Mon, 16 Sep 2024 12:25:21 -0700 Subject: [PATCH 073/108] chore: Update the root changelog (#13073) Update the root changelog Co-authored-by: ohmayr --- CHANGELOG.md | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9f4e47fb21b5..1004d55731cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,11 +3,12 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- - [google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) +- [google-ads-marketingplatform-admin==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) - [google-ai-generativelanguage==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) - [google-analytics-data==0.18.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-chat==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) - [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) - 
[google-apps-meet==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) - [google-apps-script-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) @@ -15,12 +16,12 @@ Changelogs - [google-cloud-access-approval==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) - [google-cloud-advisorynotifications==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) - [google-cloud-alloydb-connectors==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) -- [google-cloud-alloydb==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- [google-cloud-alloydb==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) - [google-cloud-api-gateway==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) - [google-cloud-api-keys==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) - [google-cloud-apigee-connect==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) - [google-cloud-apigee-registry==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) -- [google-cloud-apihub==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) +- [google-cloud-apihub==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) - [google-cloud-appengine-admin==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) - [google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) - [google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) @@ -30,7 +31,7 @@ Changelogs - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.26](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.27](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - 
[google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -41,7 +42,7 @@ Changelogs - [google-cloud-bigquery-connection==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) - [google-cloud-bigquery-data-exchange==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) - [google-cloud-bigquery-datapolicies==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.15.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.15.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) - [google-cloud-bigquery-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) - [google-cloud-bigquery-migration==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) - [google-cloud-bigquery-reservation==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) @@ -59,7 +60,7 @@ Changelogs - [google-cloud-confidentialcomputing==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) - [google-cloud-config==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) - [google-cloud-contact-center-insights==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-container==2.50.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-container==2.51.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) - [google-cloud-containeranalysis==2.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) - [google-cloud-contentwarehouse==0.7.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) - [google-cloud-data-fusion==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) @@ -71,19 +72,19 @@ Changelogs - [google-cloud-datalabeling==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) - [google-cloud-dataplex==2.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) - [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-dataproc==5.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) - 
[google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) - [google-cloud-deploy==2.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) - [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) - [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) - [google-cloud-dialogflow==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- [google-cloud-discoveryengine==0.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-discoveryengine==0.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) - [google-cloud-dlp==3.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - [google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-documentai==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) - [google-cloud-edgecontainer==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) -- [google-cloud-edgenetwork==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) +- [google-cloud-edgenetwork==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) - [google-cloud-enterpriseknowledgegraph==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) - [google-cloud-essential-contacts==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) - [google-cloud-eventarc-publishing==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) @@ -132,7 +133,7 @@ Changelogs - [google-cloud-privilegedaccessmanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) - [google-cloud-public-ca==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) - [google-cloud-rapidmigrationassessment==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.22.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) - 
[google-cloud-recommendations-ai==0.10.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) - [google-cloud-recommender==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) - [google-cloud-redis-cluster==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) @@ -148,7 +149,7 @@ Changelogs - [google-cloud-securitycentermanagement==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) - [google-cloud-service-control==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) - [google-cloud-service-directory==1.11.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) -- [google-cloud-service-management==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) +- [google-cloud-service-management==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) - [google-cloud-service-usage==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) - [google-cloud-servicehealth==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) - [google-cloud-shell==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) @@ -170,7 +171,7 @@ Changelogs - [google-cloud-video-transcoder==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) - [google-cloud-videointelligence==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) - [google-cloud-vision==3.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-visionai==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) +- [google-cloud-visionai==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) - [google-cloud-vm-migration==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) - [google-cloud-vmwareengine==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) - [google-cloud-vpc-access==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) @@ -180,7 +181,7 @@ Changelogs - [google-cloud-workstations==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) - [google-maps-addressvalidation==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- 
[google-maps-fleetengine-delivery==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) - [google-maps-fleetengine==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) - [google-maps-mapsplatformdatasets==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) - [google-maps-places==0.1.17](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) From 786ac0ec331c405dd84cb1ac76b59a3463b7ba3a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 15:37:52 -0400 Subject: [PATCH 074/108] chore: release main (#13087) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
google-apps-chat: 0.1.11 ## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.10...google-apps-chat-v0.1.11) (2024-09-16) ### Features * If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) ### Documentation * A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac))
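As a sketch of the admin-access feature described above (assuming the JSON `useAdminAccess` parameter maps to a snake_case `use_admin_access` field on the per-method request messages, e.g. `GetSpaceRequest`; the space name is a placeholder):

```python
from google.apps import chat_v1

client = chat_v1.ChatServiceClient()
space = client.get_space(
    request=chat_v1.GetSpaceRequest(
        name="spaces/SPACE",
        use_admin_access=True,  # assumed field name for the useAdminAccess parameter
    )
)
```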
google-cloud-asset: 3.26.4 ## [3.26.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.3...google-cloud-asset-v3.26.4) (2024-09-16) ### Documentation * [google-cloud-asset] Comments are clarified for certain fields in messages `QueryAssetsResponse` and `ResourceSearchResult` ([#13076](https://github.com/googleapis/google-cloud-python/issues/13076)) ([35b2c45](https://github.com/googleapis/google-cloud-python/commit/35b2c456c6791bc47ffe894f3ef966558cb6c98e))
google-cloud-batch: 0.17.28 ## [0.17.28](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.27...google-cloud-batch-v0.17.28) (2024-09-16) ### Features * [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` ([#13074](https://github.com/googleapis/google-cloud-python/issues/13074)) ([76267b2](https://github.com/googleapis/google-cloud-python/commit/76267b2b8998fd2a3602ebf4d12d2aaa30a90cde))
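A small illustrative check for the new enum value (a sketch; the enum is assumed to live on `JobStatus.State` in `batch_v1`, and the job name is a placeholder):

```python
from google.cloud import batch_v1

client = batch_v1.BatchServiceClient()
job = client.get_job(name="projects/PROJECT/locations/LOCATION/jobs/JOB")

# New state added in 0.17.28: cancellation requested but not yet complete.
if job.status.state == batch_v1.JobStatus.State.CANCELLATION_IN_PROGRESS:
    print("Job cancellation is still in progress.")
```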
google-cloud-cloudcontrolspartner: 0.2.0 ## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.3...google-cloud-cloudcontrolspartner-v0.2.0) (2024-09-16) ### ⚠ BREAKING CHANGES * [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed ### Features * A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) * Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) * Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) ### Bug Fixes * [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) ### Documentation * A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3))
google-cloud-dataproc: 5.12.0 ## [5.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.11.0...google-cloud-dataproc-v5.12.0) (2024-09-16) ### Features * [google-cloud-dataproc] Add FLINK metric source for Dataproc Metric Source ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add kms key input for create cluster API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] add resource reference for KMS keys and fix comments ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add unreachable output field for LIST batch templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add unreachable output field for LIST jobs API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Add unreachable output field for LIST workflow template API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Allow flink and trino job support for workflow templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) * [google-cloud-dataproc] Allow flink job support for jobs ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224))
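One of the listed features, the FLINK metric source, would be selected roughly like this (a sketch assuming the existing `DataprocMetricConfig.Metric` / `MetricSource` structure in `dataproc_v1`):

```python
from google.cloud import dataproc_v1

# Enable the new FLINK metric source for a cluster's metric collection.
metric_config = dataproc_v1.DataprocMetricConfig(
    metrics=[
        dataproc_v1.DataprocMetricConfig.Metric(
            metric_source=dataproc_v1.DataprocMetricConfig.MetricSource.FLINK,
        )
    ]
)
```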
google-cloud-gke-multicloud: 0.6.13 ## [0.6.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.12...google-cloud-gke-multicloud-v0.6.13) (2024-09-16) ### Features * An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18)) * An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18))
google-cloud-netapp: 0.3.14 ## [0.3.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.13...google-cloud-netapp-v0.3.14) (2024-09-16) ### Features * A new field 'allow_auto_tiering' in message 'google.cloud.netapp.v1.StoragePool' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) * A new field 'cold_tier_size_gib' in message 'google.cloud.netapp.v1.Volume' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) * A new message 'google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) * **api:** [google-cloud-netapp] A new rpc 'SwitchActiveReplicaZone' is added to service 'google.cloud.netapp.v1.NetApp' ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c))
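The new `allow_auto_tiering` flag named above would appear on a storage pool roughly as follows (a sketch; the pool name is a placeholder, and the new `SwitchActiveReplicaZone` RPC is not shown because its request fields are not spelled out here):

```python
from google.cloud import netapp_v1

pool = netapp_v1.StoragePool(
    name="projects/PROJECT/locations/LOCATION/storagePools/POOL",
    allow_auto_tiering=True,  # new field in 0.3.14
)
```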
google-cloud-orchestration-airflow: 1.14.0 ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-orchestration-airflow-v1.13.1...google-cloud-orchestration-airflow-v1.14.0) (2024-09-16) ### Features * [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * [google-cloud-orchestration-airflow] add `satisfies_pzi` to `Environment` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new message `AirflowMetadataRetentionPolicyConfig` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A new message `CheckUpgradeRequest` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) ### Documentation * A comment for field `maintenance_window` in message `.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) * A comment for message `WorkloadsConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31))
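The new `CheckUpgrade` method would be reachable roughly as below (a sketch; the `environment` and `image_version` request fields are assumed, the values are placeholders, and the call is assumed to return a long-running operation as in the v1beta1 surface):

```python
from google.cloud.orchestration.airflow import service_v1

client = service_v1.EnvironmentsClient()
operation = client.check_upgrade(
    request=service_v1.CheckUpgradeRequest(
        environment="projects/PROJECT/locations/LOCATION/environments/ENV",
        image_version="composer-X.Y.Z-airflow-A.B.C",  # placeholder target version
    )
)
result = operation.result()  # CheckUpgradeResponse describing any upgrade conflicts
```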
google-maps-routeoptimization: 0.1.3 ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.2...google-maps-routeoptimization-v0.1.3) (2024-09-16) ### Features * [google-maps-routeoptimization] minor fields and documentation update ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A new message `RouteModifiers` is added ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) ### Documentation * A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for enum value `RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for field `reasons` in message `.google.maps.routeoptimization.v1.SkippedShipment` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for message `OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for message `TimeWindow` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) * A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050))
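The new vehicle-level fields would be set roughly like this (a sketch; `route_distance_limit` on `Vehicle`, `soft_max_meters` on `DistanceLimit`, and the `avoid_tolls` flag on the new `RouteModifiers` message are assumptions, while `cost_per_kilometer_below_soft_max` is named in the notes above):

```python
from google.maps import routeoptimization_v1

vehicle = routeoptimization_v1.Vehicle(
    route_distance_limit=routeoptimization_v1.DistanceLimit(
        soft_max_meters=100_000,
        cost_per_kilometer_below_soft_max=0.25,  # new field in 0.1.3
    ),
    route_modifiers=routeoptimization_v1.RouteModifiers(
        avoid_tolls=True,  # assumed field on the new RouteModifiers message
    ),
)
```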
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: ohmayr --- .release-please-manifest.json | 18 +++++++-------- packages/google-apps-chat/CHANGELOG.md | 12 ++++++++++ .../google/apps/chat/gapic_version.py | 2 +- .../google/apps/chat_v1/gapic_version.py | 2 +- .../snippet_metadata_google.chat.v1.json | 2 +- packages/google-cloud-asset/CHANGELOG.md | 7 ++++++ .../google/cloud/asset/gapic_version.py | 2 +- .../google/cloud/asset_v1/gapic_version.py | 2 +- .../cloud/asset_v1p1beta1/gapic_version.py | 2 +- .../cloud/asset_v1p2beta1/gapic_version.py | 2 +- .../cloud/asset_v1p4beta1/gapic_version.py | 2 +- .../cloud/asset_v1p5beta1/gapic_version.py | 2 +- ...nippet_metadata_google.cloud.asset.v1.json | 2 +- ...metadata_google.cloud.asset.v1p1beta1.json | 2 +- ...metadata_google.cloud.asset.v1p2beta1.json | 2 +- ...metadata_google.cloud.asset.v1p5beta1.json | 2 +- packages/google-cloud-batch/CHANGELOG.md | 7 ++++++ .../google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../cloud/batch_v1alpha/gapic_version.py | 2 +- ...nippet_metadata_google.cloud.batch.v1.json | 2 +- ...t_metadata_google.cloud.batch.v1alpha.json | 2 +- .../CHANGELOG.md | 23 +++++++++++++++++++ .../cloudcontrolspartner/gapic_version.py | 2 +- .../cloudcontrolspartner_v1/gapic_version.py | 2 +- .../gapic_version.py | 2 +- ..._google.cloud.cloudcontrolspartner.v1.json | 2 +- ...gle.cloud.cloudcontrolspartner.v1beta.json | 2 +- packages/google-cloud-dataproc/CHANGELOG.md | 14 +++++++++++ .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/gapic_version.py | 2 +- ...pet_metadata_google.cloud.dataproc.v1.json | 2 +- .../google-cloud-gke-multicloud/CHANGELOG.md | 8 +++++++ .../cloud/gke_multicloud/gapic_version.py | 2 +- .../cloud/gke_multicloud_v1/gapic_version.py | 2 +- ...etadata_google.cloud.gkemulticloud.v1.json | 2 +- packages/google-cloud-netapp/CHANGELOG.md | 10 ++++++++ .../google/cloud/netapp/gapic_version.py | 2 +- .../google/cloud/netapp_v1/gapic_version.py | 2 +- ...ippet_metadata_google.cloud.netapp.v1.json | 2 +- .../CHANGELOG.md | 19 +++++++++++++++ .../airflow/service/gapic_version.py | 2 +- .../airflow/service_v1/gapic_version.py | 2 +- .../airflow/service_v1beta1/gapic_version.py | 2 +- ...loud.orchestration.airflow.service.v1.json | 2 +- ...orchestration.airflow.service.v1beta1.json | 2 +- .../CHANGELOG.md | 23 +++++++++++++++++++ .../maps/routeoptimization/gapic_version.py | 2 +- .../routeoptimization_v1/gapic_version.py | 2 +- ...data_google.maps.routeoptimization.v1.json | 2 +- 50 files changed, 172 insertions(+), 49 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 2a86228c483b..19f3bed603ce 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -5,7 +5,7 @@ "packages/google-analytics-admin": "0.23.0", "packages/google-analytics-data": "0.18.11", "packages/google-apps-card": "0.1.4", - "packages/google-apps-chat": "0.1.10", + "packages/google-apps-chat": "0.1.11", "packages/google-apps-events-subscriptions": "0.1.2", "packages/google-apps-meet": "0.1.8", "packages/google-apps-script-type": "0.3.10", @@ -23,12 +23,12 @@ "packages/google-cloud-appengine-logging": "1.4.5", "packages/google-cloud-apphub": "0.1.2", 
"packages/google-cloud-artifact-registry": "1.11.5", - "packages/google-cloud-asset": "3.26.3", + "packages/google-cloud-asset": "3.26.4", "packages/google-cloud-assured-workloads": "1.12.5", "packages/google-cloud-automl": "2.13.5", "packages/google-cloud-backupdr": "0.1.3", "packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.27", + "packages/google-cloud-batch": "0.17.28", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", "packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -49,7 +49,7 @@ "packages/google-cloud-build": "3.24.2", "packages/google-cloud-certificate-manager": "1.7.2", "packages/google-cloud-channel": "1.18.5", - "packages/google-cloud-cloudcontrolspartner": "0.1.3", + "packages/google-cloud-cloudcontrolspartner": "0.2.0", "packages/google-cloud-cloudquotas": "0.1.10", "packages/google-cloud-commerce-consumer-procurement": "0.1.7", "packages/google-cloud-common": "1.3.5", @@ -68,7 +68,7 @@ "packages/google-cloud-dataform": "0.5.11", "packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.11.0", + "packages/google-cloud-dataproc": "5.12.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", @@ -92,7 +92,7 @@ "packages/google-cloud-gke-backup": "0.5.11", "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", - "packages/google-cloud-gke-multicloud": "0.6.12", + "packages/google-cloud-gke-multicloud": "0.6.13", "packages/google-cloud-gsuiteaddons": "0.3.10", "packages/google-cloud-iam": "2.15.2", "packages/google-cloud-iam-logging": "1.3.5", @@ -110,14 +110,14 @@ "packages/google-cloud-monitoring": "2.22.2", "packages/google-cloud-monitoring-dashboards": "2.15.3", "packages/google-cloud-monitoring-metrics-scopes": "1.6.5", - "packages/google-cloud-netapp": "0.3.13", + "packages/google-cloud-netapp": "0.3.14", "packages/google-cloud-network-connectivity": "2.4.5", "packages/google-cloud-network-management": "1.18.0", "packages/google-cloud-network-security": "0.9.11", "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", - "packages/google-cloud-orchestration-airflow": "1.13.1", + "packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", "packages/google-cloud-parallelstore": "0.2.3", @@ -183,7 +183,7 @@ "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", "packages/google-maps-places": "0.1.17", - "packages/google-maps-routeoptimization": "0.1.2", + "packages/google-maps-routeoptimization": "0.1.3", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", "packages/google-shopping-css": "0.1.8", diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index 2ed2b6a49d98..ca79f8626cc8 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.10...google-apps-chat-v0.1.11) (2024-09-16) + + +### Features + +* If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` 
parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + + +### Documentation + +* A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + ## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05) diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index ae65b2bfaefe..f0d8a1017646 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.0" + "version": "0.1.11" }, "snippets": [ { diff --git a/packages/google-cloud-asset/CHANGELOG.md b/packages/google-cloud-asset/CHANGELOG.md index a038645b6f8a..733c3b3086d1 100644 --- a/packages/google-cloud-asset/CHANGELOG.md +++ b/packages/google-cloud-asset/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## [3.26.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.3...google-cloud-asset-v3.26.4) (2024-09-16) + + +### Documentation + +* [google-cloud-asset] Comments are clarified for certain fields in messages `QueryAssetsResponse` and `ResourceSearchResult` ([#13076](https://github.com/googleapis/google-cloud-python/issues/13076)) ([35b2c45](https://github.com/googleapis/google-cloud-python/commit/35b2c456c6791bc47ffe894f3ef966558cb6c98e)) + ## [3.26.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.2...google-cloud-asset-v3.26.3) (2024-07-30) diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ 
b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py index e6f4a2765144..d0552744bb07 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 5a90dfa88b31..ada630458cc7 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index dcbeb822733b..d088663067e5 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 38eaede856da..6af620ebaf84 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 13ffc229e3a7..1d6c2a34e3ae 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index addee943a3ea..27c494159115 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.17.28](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.27...google-cloud-batch-v0.17.28) (2024-09-16) + + +### Features + +* [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` ([#13074](https://github.com/googleapis/google-cloud-python/issues/13074)) ([76267b2](https://github.com/googleapis/google-cloud-python/commit/76267b2b8998fd2a3602ebf4d12d2aaa30a90cde)) + ## [0.17.27](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.26...google-cloud-batch-v0.17.27) (2024-09-03) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 
558c8aab67c5..5f7f6c52ce54 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.28" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 558c8aab67c5..5f7f6c52ce54 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.28" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 558c8aab67c5..5f7f6c52ce54 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.17.28" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index e2df1067e4dd..4aeac54efe09 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.28" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 7f67670b100c..88b47050fed2 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.1.0" + "version": "0.17.28" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md index b2bd23a8caac..275e18ff132a 100644 --- a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md +++ b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.3...google-cloud-cloudcontrolspartner-v0.2.0) (2024-09-16) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed + +### Features + +* A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum 
`.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Bug Fixes + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Documentation + +* A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.2...google-cloud-cloudcontrolspartner-v0.1.3) (2024-07-30) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 5a93fc370b33..606c14b81f01 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index 642805220b5c..9c0039bf1f65 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index 00b658392846..7a8b08f948bd 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.11.0...google-cloud-dataproc-v5.12.0) (2024-09-16) + + +### Features + +* [google-cloud-dataproc] Add FLINK metric source for Dataproc Metric Source ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add kms key input for create cluster API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] add resource reference for KMS keys and fix comments ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST batch templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST jobs API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST workflow template API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Allow flink and trino job support for workflow templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Allow flink job support for jobs 
([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) + ## [5.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.2...google-cloud-dataproc-v5.11.0) (2024-09-03) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 558c8aab67c5..435e79ea7a30 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 558c8aab67c5..435e79ea7a30 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c5f4e003db04..a44d5d6db9b3 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "0.1.0" + "version": "5.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-multicloud/CHANGELOG.md b/packages/google-cloud-gke-multicloud/CHANGELOG.md index fb3583312dfb..415808ddd4f9 100644 --- a/packages/google-cloud-gke-multicloud/CHANGELOG.md +++ b/packages/google-cloud-gke-multicloud/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.6.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.12...google-cloud-gke-multicloud-v0.6.13) (2024-09-16) + + +### Features + +* An optional field `kubelet_config` in message `.google.cloud.gkemulticloud.v1.AwsNodePool` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18)) +* An optional field `security_posture_config` in message `.google.cloud.gkemulticloud.v1.AttachedCluster` is added ([33834de](https://github.com/googleapis/google-cloud-python/commit/33834de6d9eeced6da30f3fcbeb4e1029e07cf18)) + ## [0.6.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.11...google-cloud-gke-multicloud-v0.6.12) (2024-07-30) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py index 558c8aab67c5..b72badcc1eca 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the 
specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.13" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py index 558c8aab67c5..b72badcc1eca 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.13" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json index 08dd05577dce..7fec7507cd76 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-multicloud", - "version": "0.1.0" + "version": "0.6.13" }, "snippets": [ { diff --git a/packages/google-cloud-netapp/CHANGELOG.md b/packages/google-cloud-netapp/CHANGELOG.md index d794a24d9b59..5313d0e0147c 100644 --- a/packages/google-cloud-netapp/CHANGELOG.md +++ b/packages/google-cloud-netapp/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.3.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.13...google-cloud-netapp-v0.3.14) (2024-09-16) + + +### Features + +* A new field 'allow_auto_tiering' in message 'google.cloud.netapp.v1.StoragePool' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) +* A new field 'cold_tier_size_gib' in message 'google.cloud.netapp.v1.Volume' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) +* A new message 'google.cloud.netapp.v1.SwitchActiveReplicaZoneRequest' is added ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) +* **api:** [google-cloud-netapp] A new rpc 'SwitchActiveReplicaZone' is added to service 'google.cloud.netapp.v1.NetApp' ([5e3f4ae](https://github.com/googleapis/google-cloud-python/commit/5e3f4aebeb2f79efb1992ae623eb1aea86de2b0c)) + ## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.12...google-cloud-netapp-v0.3.13) (2024-07-31) diff --git a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py index 558c8aab67c5..0106eadcd8d9 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
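The google-cloud-netapp changelog above introduces a `SwitchActiveReplicaZone` RPC on the `google.cloud.netapp.v1.NetApp` service together with a new `SwitchActiveReplicaZoneRequest` message. A minimal sketch of calling it through the generated client follows; the snake_case method name follows standard GAPIC naming, while the `name` request field, the resource path, and the long-running-operation return type are assumptions, since the changelog does not list them.

```python
from google.cloud import netapp_v1

client = netapp_v1.NetAppClient()

# Field names are not listed in the changelog; `name` is assumed and the
# storage-pool path below is purely illustrative.
request = netapp_v1.SwitchActiveReplicaZoneRequest(
    name="projects/my-project/locations/us-central1/storagePools/my-pool",
)

# Assumed to return a long-running operation, as most mutating NetApp RPCs do.
operation = client.switch_active_replica_zone(request=request)
print(operation.result())
```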
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.14" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py index 558c8aab67c5..0106eadcd8d9 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.14" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json index a43c86575bc2..59c739a9b3ab 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json +++ b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-netapp", - "version": "0.1.0" + "version": "0.3.14" }, "snippets": [ { diff --git a/packages/google-cloud-orchestration-airflow/CHANGELOG.md b/packages/google-cloud-orchestration-airflow/CHANGELOG.md index f48353fb4a09..94c3195d35e3 100644 --- a/packages/google-cloud-orchestration-airflow/CHANGELOG.md +++ b/packages/google-cloud-orchestration-airflow/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-orchestration-airflow-v1.13.1...google-cloud-orchestration-airflow-v1.14.0) (2024-09-16) + + +### Features + +* [google-cloud-orchestration-airflow] A new method `CheckUpgrade` is added to service `Environments` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* [google-cloud-orchestration-airflow] add `satisfies_pzi` to `Environment` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new field `airflow_metadata_retention_config` is added to message `.google.cloud.orchestration.airflow.service.v1.DataRetentionConfig` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new field `satisfies_pzi` is added to message `.google.cloud.orchestration.airflow.service.v1.Environment` ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new message `AirflowMetadataRetentionPolicyConfig` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A new message `CheckUpgradeRequest` is added ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) + + +### Documentation + +* A comment for field `maintenance_window` in message `.google.cloud.orchestration.airflow.service.v1.EnvironmentConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A comment for field `storage_mode` in message `.google.cloud.orchestration.airflow.service.v1.TaskLogsRetentionConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) +* A comment for message 
`WorkloadsConfig` is changed ([b624f04](https://github.com/googleapis/google-cloud-python/commit/b624f04da8a9b6461d4714f0f0bcf13f1f35fa31)) + ## [1.13.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-orchestration-airflow-v1.13.0...google-cloud-orchestration-airflow-v1.13.1) (2024-07-30) diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py index 558c8aab67c5..2159c8af6f8e 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
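The google-cloud-orchestration-airflow changelog above adds a `CheckUpgrade` method to the `Environments` service and a new `CheckUpgradeRequest` message. A minimal sketch under assumptions: the `environment` and `image_version` request fields mirror the pre-existing v1beta1 surface and are not spelled out in the changelog, the environment path and image version string are hypothetical, and the method is assumed to return a long-running operation.

```python
from google.cloud.orchestration.airflow import service_v1

client = service_v1.EnvironmentsClient()

# Request fields are assumed to match the older v1beta1 CheckUpgrade surface.
request = service_v1.CheckUpgradeRequest(
    environment="projects/my-project/locations/us-central1/environments/my-env",
    image_version="composer-2.9.7-airflow-2.9.3",  # illustrative version string
)

# Assumed to be a long-running operation that resolves to a CheckUpgradeResponse.
operation = client.check_upgrade(request=request)
print(operation.result())
```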
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index 389370672713..39aa0d2d425d 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow", - "version": "0.1.0" + "version": "1.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json index 4c413d500bdb..e42c5d68150d 100644 --- a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow-service", - "version": "0.1.0" + "version": "1.14.0" }, "snippets": [ { diff --git a/packages/google-maps-routeoptimization/CHANGELOG.md b/packages/google-maps-routeoptimization/CHANGELOG.md index 3588a5750cea..14bb0c6b2dc5 100644 --- a/packages/google-maps-routeoptimization/CHANGELOG.md +++ b/packages/google-maps-routeoptimization/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.2...google-maps-routeoptimization-v0.1.3) (2024-09-16) + + +### Features + +* [google-maps-routeoptimization] minor fields and documentation update ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A new field `cost_per_kilometer_below_soft_max` is added to message `.google.maps.routeoptimization.v1.DistanceLimit` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A new field `route_modifiers` is added to message `.google.maps.routeoptimization.v1.Vehicle` ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A new message `RouteModifiers` is added ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) + + +### Documentation + +* A comment for enum value `CODE_UNSPECIFIED` in enum `Code` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for enum value `DEFAULT_SOLVE` in enum `SolvingMode` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for enum value `RELAX_VISIT_TIMES_AND_SEQUENCE_AFTER_THRESHOLD` in enum `Level` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A 
comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for field `reasons` in message `.google.maps.routeoptimization.v1.SkippedShipment` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for field `validation_errors` in message `.google.maps.routeoptimization.v1.OptimizeToursResponse` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for message `OptimizeToursValidationError` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for message `TimeWindow` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) +* A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([366f6f1](https://github.com/googleapis/google-cloud-python/commit/366f6f10e29a9d9cc307cbd1f16deb4decf26050)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.1...google-maps-routeoptimization-v0.1.2) (2024-07-30) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json index c329d83ca2a2..ff99ce099d17 100644 --- a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json +++ b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routeoptimization", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { From 1f8b5640b0ac5397318ede4ebcfa120120ebccc8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 11:13:48 -0400 Subject: [PATCH 075/108] feat: [google-cloud-dialogflow] created new boolean fields in conversation model for zone isolation and zone separation compliance status (#13096) BEGIN_COMMIT_OVERRIDE feat: created new boolean fields in conversation model for zone isolation and zone separation compliance status END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 676665642 Source-Link: https://github.com/googleapis/googleapis/commit/f87ae4487b303f32c3ddc9638649d32dda2e2776 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d66ac41aa072bbc4367f7d2758d0d0ba7a7094a5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpYWxvZ2Zsb3cvLk93bEJvdC55YW1sIiwiaCI6ImQ2NmFjNDFhYTA3MmJiYzQzNjdmN2QyNzU4ZDBkMGJhN2E3MDk0YTUifQ== --------- Co-authored-by: Owl Bot --- .../dialogflow_v2/types/conversation_model.py | 20 +++++++++++++++++++ .../dialogflow_v2/test_conversation_models.py | 16 +++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py index 21c3e927a509..0170ee992c2f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_model.py @@ -89,6 +89,16 @@ class ConversationModel(proto.Message): Metadata for smart reply models. This field is a member of `oneof`_ ``model_metadata``. + satisfies_pzs (bool): + Output only. A read only boolean field + reflecting Zone Separation status of the model. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. A read only boolean field + reflecting Zone Isolation status of the model. + + This field is a member of `oneof`_ ``_satisfies_pzi``. 
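The docstring additions above describe the new read-only `satisfies_pzs` (zone separation) and `satisfies_pzi` (zone isolation) booleans on `ConversationModel`; the corresponding proto fields follow in the next hunk. A minimal sketch of reading them from an existing model is shown below; the client class name and the model resource path are assumptions, since this patch only touches the types and unit tests.

```python
from google.cloud import dialogflow_v2

client = dialogflow_v2.ConversationModelsClient()

# Hypothetical resource name of an existing conversation model.
request = dialogflow_v2.GetConversationModelRequest(
    name="projects/my-project/locations/global/conversationModels/my-model",
)
model = client.get_conversation_model(request=request)

# Both fields are output-only and proto3-optional, so they may be unset.
print("Zone separation (PZS):", model.satisfies_pzs)
print("Zone isolation (PZI):", model.satisfies_pzi)
```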
""" class State(proto.Enum): @@ -183,6 +193,16 @@ class ModelType(proto.Enum): oneof="model_metadata", message="SmartReplyModelMetadata", ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=25, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=26, + optional=True, + ) class ConversationModelEvaluation(proto.Message): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py index cd7615c81d65..4f435eac5dd1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py @@ -1614,6 +1614,8 @@ def test_get_conversation_model(request_type, transport: str = "grpc"): display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_conversation_model(request) @@ -1629,6 +1631,8 @@ def test_get_conversation_model(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.state == conversation_model.ConversationModel.State.CREATING assert response.language_code == "language_code_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_conversation_model_empty_call(): @@ -1742,6 +1746,8 @@ async def test_get_conversation_model_empty_call_async(): display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_conversation_model() @@ -1817,6 +1823,8 @@ async def test_get_conversation_model_async( display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_conversation_model(request) @@ -1833,6 +1841,8 @@ async def test_get_conversation_model_async( assert response.display_name == "display_name_value" assert response.state == conversation_model.ConversationModel.State.CREATING assert response.language_code == "language_code_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -5028,6 +5038,8 @@ def test_create_conversation_model_rest(request_type): "language_code": "language_code_value", "article_suggestion_model_metadata": {"training_model_type": 2}, "smart_reply_model_metadata": {"training_model_type": 2}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -5414,6 +5426,8 @@ def test_get_conversation_model_rest(request_type): display_name="display_name_value", state=conversation_model.ConversationModel.State.CREATING, language_code="language_code_value", + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -5433,6 +5447,8 @@ def test_get_conversation_model_rest(request_type): assert response.display_name == "display_name_value" assert response.state == conversation_model.ConversationModel.State.CREATING assert response.language_code == "language_code_value" + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_conversation_model_rest_use_cached_wrapped_rpc(): From 65f098a1125677c69240849703a0b97bcab7fc4c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 11:21:01 -0400 Subject: [PATCH 076/108] feat: [google-analytics-data] add `GetPropertyQuotasSnapshot` method to the Data API v1alpha (#13095) BEGIN_COMMIT_OVERRIDE feat: add `GetPropertyQuotasSnapshot` method to the Data API v1alpha feat: add `PropertyQuotasSnapshot` type to the Data API v1alpha docs: update the documentation for the `CreateReportTask` method END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: add `PropertyQuotasSnapshot` type to the Data API v1alpha docs: update the documentation for the `CreateReportTask` method PiperOrigin-RevId: 676527881 Source-Link: https://github.com/googleapis/googleapis/commit/923b6f3167fc309d9501bc97a6ab67cfe522522e Source-Link: https://github.com/googleapis/googleapis-gen/commit/8729c5b4cdacca9673ceab43f90bffdc97aa5147 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFuYWx5dGljcy1kYXRhLy5Pd2xCb3QueWFtbCIsImgiOiI4NzI5YzViNGNkYWNjYTk2NzNjZWFiNDNmOTBiZmZkYzk3YWE1MTQ3In0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/analytics/data_v1alpha/__init__.py | 4 + .../data_v1alpha/gapic_metadata.json | 15 + .../alpha_analytics_data/async_client.py | 124 + .../services/alpha_analytics_data/client.py | 132 + .../alpha_analytics_data/transports/base.py | 17 + .../alpha_analytics_data/transports/grpc.py | 37 + .../transports/grpc_asyncio.py | 42 + .../alpha_analytics_data/transports/rest.py | 136 + .../analytics/data_v1alpha/types/__init__.py | 4 + .../data_v1alpha/types/analytics_data_api.py | 64 +- .../analytics/data_v1alpha/types/data.py | 4 +- ...data_get_property_quotas_snapshot_async.py | 52 + ..._data_get_property_quotas_snapshot_sync.py | 52 + ...etadata_google.analytics.data.v1alpha.json | 161 + .../scripts/fixup_data_v1alpha_keywords.py | 1 + .../data_v1alpha/test_alpha_analytics_data.py | 3373 ++++++++++------- 16 files changed, 2887 insertions(+), 1331 deletions(-) create mode 100644 packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py create mode 100644 packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py index d27c32f3b750..4b5c6ad3ac51 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py @@ -32,6 +32,7 @@ 
CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -40,6 +41,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -172,6 +174,7 @@ "FunnelStep", "FunnelSubReport", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", "InListFilter", @@ -190,6 +193,7 @@ "NumericValue", "OrderBy", "PropertyQuota", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json index 886097e06d3f..bb6c0b6f462d 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json @@ -30,6 +30,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -100,6 +105,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -170,6 +180,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index 5afbe71746b5..85f47086326b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -71,6 +71,12 @@ class AlphaAnalyticsDataAsyncClient: parse_audience_list_path = staticmethod( AlphaAnalyticsDataClient.parse_audience_list_path ) + property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.property_quotas_snapshot_path + ) + parse_property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path + ) recurring_audience_list_path = staticmethod( AlphaAnalyticsDataClient.recurring_audience_list_path ) @@ -1468,6 +1474,118 @@ async def sample_list_recurring_audience_lists(): # Done; return the response. return response + async def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (:class:`str`): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_report_task( self, request: Optional[ @@ -1485,6 +1603,12 @@ async def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. 
+ + A report task created by one user can be listed and + queried by all users who have access to the property. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index 4a3bc827021a..2b333ae4af4a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -207,6 +207,21 @@ def parse_audience_list_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def property_quotas_snapshot_path( + property: str, + ) -> str: + """Returns a fully-qualified property_quotas_snapshot string.""" + return "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + + @staticmethod + def parse_property_quotas_snapshot_path(path: str) -> Dict[str, str]: + """Parses a property_quotas_snapshot path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/propertyQuotasSnapshot$", path) + return m.groupdict() if m else {} + @staticmethod def recurring_audience_list_path( property: str, @@ -1898,6 +1913,117 @@ def sample_list_recurring_audience_lists(): # Done; return the response. return response + def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (str): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_report_task( self, request: Optional[ @@ -1915,6 +2041,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py index c2c66c588816..3ba97b9f363e 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py @@ -180,6 +180,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method.wrap_method( self.create_report_task, default_timeout=None, @@ -320,6 +325,18 @@ def list_recurring_audience_lists( ]: raise NotImplementedError() + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Union[ + analytics_data_api.PropertyQuotasSnapshot, + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ], + ]: + raise NotImplementedError() + @property def create_report_task( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py index b9962cad40e2..c43f7d864e80 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -659,6 +659,37 @@ def list_recurring_audience_lists( ) return self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + ~.PropertyQuotasSnapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -672,6 +703,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. 
+ + A report task created by one user can be listed and + queried by all users who have access to the property. + Returns: Callable[[~.CreateReportTaskRequest], ~.Operation]: diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py index c05a987fbb2b..a220f2ddb524 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py @@ -667,6 +667,37 @@ def list_recurring_audience_lists( ) return self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + Awaitable[~.PropertyQuotasSnapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -681,6 +712,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. 
+ Returns: Callable[[~.CreateReportTaskRequest], Awaitable[~.Operation]]: @@ -841,6 +878,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method_async.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method_async.wrap_method( self.create_report_task, default_timeout=None, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py index 5f98dacd404b..510c1d55640b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py @@ -103,6 +103,14 @@ def post_get_audience_list(self, response): logging.log(f"Received response: {response}") return response + def pre_get_property_quotas_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_property_quotas_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_recurring_audience_list(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -275,6 +283,31 @@ def post_get_audience_list( """ return response + def pre_get_property_quotas_snapshot( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_data_api.GetPropertyQuotasSnapshotRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_property_quotas_snapshot( + self, response: analytics_data_api.PropertyQuotasSnapshot + ) -> analytics_data_api.PropertyQuotasSnapshot: + """Post-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + def pre_get_recurring_audience_list( self, request: analytics_data_api.GetRecurringAudienceListRequest, @@ -1002,6 +1035,98 @@ def __call__( resp = self._interceptor.post_get_audience_list(resp) return resp + class _GetPropertyQuotasSnapshot(AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("GetPropertyQuotasSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Call the get property quotas + snapshot method over HTTP. + + Args: + request (~.analytics_data_api.GetPropertyQuotasSnapshotRequest): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/propertyQuotasSnapshot}", + }, + ] + request, metadata = self._interceptor.pre_get_property_quotas_snapshot( + request, metadata + ) + pb_request = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.PropertyQuotasSnapshot() + pb_resp = analytics_data_api.PropertyQuotasSnapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_property_quotas_snapshot(resp) + return resp + class _GetRecurringAudienceList(AlphaAnalyticsDataRestStub): def __hash__(self): return hash("GetRecurringAudienceList") @@ -1876,6 +2001,17 @@ def get_audience_list( # In C++ this would require a dynamic_cast return self._GetAudienceList(self._session, self._host, self._interceptor) # type: ignore + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetPropertyQuotasSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property def get_recurring_audience_list( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py index be0c011beb28..c5f36ceee95f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py @@ -23,6 +23,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -31,6 +32,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -129,6 +131,7 @@ "CreateRecurringAudienceListRequest", "CreateReportTaskRequest", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", "ListAudienceListsRequest", @@ -137,6 +140,7 @@ "ListRecurringAudienceListsResponse", "ListReportTasksRequest", "ListReportTasksResponse", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py index 40cf2af6b247..5ef02adbab94 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -31,6 +31,8 @@ "GetRecurringAudienceListRequest", "ListRecurringAudienceListsRequest", "ListRecurringAudienceListsResponse", + "GetPropertyQuotasSnapshotRequest", + "PropertyQuotasSnapshot", "GetAudienceListRequest", "ListAudienceListsRequest", "ListAudienceListsResponse", @@ -368,6 +370,60 @@ def raw_page(self): ) +class GetPropertyQuotasSnapshotRequest(proto.Message): + r"""A request to return the PropertyQuotasSnapshot for a given + category. + + Attributes: + name (str): + Required. Quotas from this property will be listed in the + response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PropertyQuotasSnapshot(proto.Message): + r"""Current state of all Property Quotas organized by quota + category. + + Attributes: + name (str): + Identifier. The property quota snapshot + resource name. 
+ core_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for core property tokens + realtime_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for realtime property tokens + funnel_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for funnel property tokens + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + core_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=1, + message=data.PropertyQuota, + ) + realtime_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=2, + message=data.PropertyQuota, + ) + funnel_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=3, + message=data.PropertyQuota, + ) + + class GetAudienceListRequest(proto.Message): r"""A request to retrieve configuration metadata about a specific audience list. @@ -942,7 +998,7 @@ class RunFunnelReportRequest(proto.Message): Attributes: property (str): - Optional. A Google Analytics GA4 property identifier whose + Optional. A Google Analytics property identifier whose events are tracked. Specified in the URL path and not the body. To learn more, see `where to find your Property ID `__. @@ -1146,7 +1202,7 @@ class ReportTask(proto.Message): name (str): Output only. Identifier. The report task resource name assigned during creation. Format: - ``properties/{property}/reportTasks/{report_task}`` + "properties/{property}/reportTasks/{report_task}". report_definition (google.analytics.data_v1alpha.types.ReportTask.ReportDefinition): Optional. A report definition to fetch report data, which describes the structure of a report. @@ -1236,8 +1292,8 @@ class ReportDefinition(proto.Message): returned if they are not separately removed by a filter. Regardless of this ``keep_empty_rows`` setting, only data - recorded by the Google Analytics (GA4) property can be - displayed in a report. + recorded by the Google Analytics property can be displayed + in a report. For example if a property never logs a ``purchase`` event, then a query for the ``eventName`` dimension and diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py index d0d65e10b736..f3e86639400b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py @@ -293,7 +293,7 @@ class MetricType(proto.Enum): class RestrictedMetricType(proto.Enum): r"""Categories of data that you may be restricted from viewing on - certain GA4 properties. + certain Google Analytics properties. Values: RESTRICTED_METRIC_TYPE_UNSPECIFIED (0): @@ -2231,7 +2231,7 @@ class Segment(proto.Message): particular line of products or who visit a specific part of your site or trigger certain events in your app. - To learn more, see `GA4 Segment + To learn more, see `Segment Builder `__. This message has `oneof`_ fields (mutually exclusive fields). 
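The rest.py hunk above adds pre_get_property_quotas_snapshot / post_get_property_quotas_snapshot interceptor hooks and tells callers to "Override in a subclass" without showing what that looks like in practice. The sketch below is one way those hooks might be wired up; it assumes the standard GAPIC REST names AlphaAnalyticsDataRestInterceptor and AlphaAnalyticsDataRestTransport (which are not spelled out in this patch), assumes Application Default Credentials are available, and builds the resource name in the properties/{property}/propertyQuotasSnapshot format documented by GetPropertyQuotasSnapshotRequest.

# A minimal sketch, not part of the generated patch. The interceptor and
# transport class names below are assumed from the usual GAPIC REST layout.
from google.analytics import data_v1alpha
from google.analytics.data_v1alpha.services.alpha_analytics_data.transports.rest import (
    AlphaAnalyticsDataRestInterceptor,  # assumed name, per standard GAPIC codegen
    AlphaAnalyticsDataRestTransport,    # assumed name, per standard GAPIC codegen
)


class LoggingQuotaInterceptor(AlphaAnalyticsDataRestInterceptor):
    """Logs the quota-snapshot request and response without modifying either."""

    def pre_get_property_quotas_snapshot(self, request, metadata):
        # Runs before the request is sent to the AlphaAnalyticsData server;
        # both the request and the metadata may be returned modified.
        print(f"Requesting quota snapshot: {request.name}")
        return request, metadata

    def post_get_property_quotas_snapshot(self, response):
        # Runs after the server responds, before the response reaches user code.
        print(f"Snapshot received for: {response.name}")
        return response


def show_property_quotas(property_id: str) -> data_v1alpha.PropertyQuotasSnapshot:
    # The REST transport accepts the interceptor at construction time
    # (assumed keyword, matching the generated transport's usual signature).
    transport = AlphaAnalyticsDataRestTransport(interceptor=LoggingQuotaInterceptor())
    client = data_v1alpha.AlphaAnalyticsDataClient(transport=transport)

    request = data_v1alpha.GetPropertyQuotasSnapshotRequest(
        name=f"properties/{property_id}/propertyQuotasSnapshot",
    )
    snapshot = client.get_property_quotas_snapshot(request=request)

    # The snapshot carries one PropertyQuota per category, as defined in the
    # PropertyQuotasSnapshot message added above.
    for label, quota in (
        ("core", snapshot.core_property_quota),
        ("realtime", snapshot.realtime_property_quota),
        ("funnel", snapshot.funnel_property_quota),
    ):
        print(label, quota)
    return snapshot

Returning the request and response unchanged keeps the hooks side-effect-only, which matches where the generated _GetPropertyQuotasSnapshot stub invokes them: the pre hook runs just before the request is transcoded and sent, and the post hook runs just before the deserialized PropertyQuotasSnapshot is handed back to user code.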
diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py new file mode 100644 index 000000000000..cfa47528bf6b --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py new file mode 100644 index 000000000000..964edacdbb5d --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 619e88f4243b..15f8d0d6e6d8 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -679,6 +679,167 @@ ], "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": 
"analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py index 416f17a987b4..463b61dcee73 100644 --- a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py +++ b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py @@ -43,6 +43,7 @@ class dataCallTransformer(cst.CSTTransformer): 'create_recurring_audience_list': ('parent', 'recurring_audience_list', ), 'create_report_task': ('parent', 'report_task', ), 'get_audience_list': ('name', ), + 'get_property_quotas_snapshot': ('name', ), 'get_recurring_audience_list': ('name', ), 'get_report_task': ('name', ), 'list_audience_lists': ('parent', 'page_size', 'page_token', ), diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py 
b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 16d76b2f738b..1a4da5db4733 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -5120,11 +5120,11 @@ async def test_list_recurring_audience_lists_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateReportTaskRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def test_create_report_task(request_type, transport: str = "grpc"): +def test_get_property_quotas_snapshot(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5136,23 +5136,26 @@ def test_create_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) + response = client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_create_report_task_empty_call(): +def test_get_property_quotas_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5162,18 +5165,18 @@ def test_create_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_report_task() + client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() -def test_create_report_task_non_empty_request_with_auto_populated_field(): +def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5184,26 +5187,26 @@ def test_create_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + request = analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_report_task(request=request) + client.get_property_quotas_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) -def test_create_report_task_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5218,7 +5221,8 @@ def test_create_report_task_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_report_task in client._transport._wrapped_methods + client._transport.get_property_quotas_snapshot + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -5227,20 +5231,15 @@ def test_create_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_report_task + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5248,7 +5247,7 @@ def test_create_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_report_task_empty_call_async(): +async def test_get_property_quotas_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5258,20 +5257,22 @@ async def test_create_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task() + response = await client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() @pytest.mark.asyncio -async def test_create_report_task_async_use_cached_wrapped_rpc( +async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5288,7 +5289,7 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_report_task + client._client._transport.get_property_quotas_snapshot in client._client._transport._wrapped_methods ) @@ -5296,21 +5297,16 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_report_task + client._client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5318,9 +5314,9 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_report_task_async( +async def test_get_property_quotas_snapshot_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.CreateReportTaskRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5333,46 +5329,49 @@ async def test_create_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task(request) + response = await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_create_report_task_async_from_dict(): - await test_create_report_task_async(request_type=dict) +async def test_get_property_quotas_snapshot_async_from_dict(): + await test_get_property_quotas_snapshot_async(request_type=dict) -def test_create_report_task_field_headers(): +def test_get_property_quotas_snapshot_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5383,30 +5382,30 @@ def test_create_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_report_task_field_headers_async(): +async def test_get_property_quotas_snapshot_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + analytics_data_api.PropertyQuotasSnapshot() ) - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5417,41 +5416,37 @@ async def test_create_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_report_task_flattened(): +def test_get_property_quotas_snapshot_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_report_task_flattened_error(): +def test_get_property_quotas_snapshot_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5459,50 +5454,45 @@ def test_create_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_report_task_flattened_async(): +async def test_get_property_quotas_snapshot_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + response = await client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_report_task_flattened_error_async(): +async def test_get_property_quotas_snapshot_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5510,21 +5500,20 @@ async def test_create_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + await client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryReportTaskRequest, + analytics_data_api.CreateReportTaskRequest, dict, ], ) -def test_query_report_task(request_type, transport: str = "grpc"): +def test_create_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5536,26 +5525,23 @@ def test_query_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) - response = client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) -def test_query_report_task_empty_call(): +def test_create_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5565,18 +5551,18 @@ def test_query_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.query_report_task() + client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() -def test_query_report_task_non_empty_request_with_auto_populated_field(): +def test_create_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5587,26 +5573,26 @@ def test_query_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.QueryReportTaskRequest( - name="name_value", + request = analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.query_report_task(request=request) + client.create_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest( - name="name_value", + assert args[0] == analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) -def test_query_report_task_use_cached_wrapped_rpc(): +def test_create_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5620,7 +5606,9 @@ def test_query_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.query_report_task in client._transport._wrapped_methods + assert ( + client._transport.create_report_task in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5628,15 +5616,20 @@ def test_query_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_report_task + client._transport.create_report_task ] = mock_rpc request = {} - client.query_report_task(request) + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5644,7 +5637,7 @@ def test_query_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_query_report_task_empty_call_async(): +async def test_create_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5654,22 +5647,20 @@ async def test_query_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task() + response = await client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() @pytest.mark.asyncio -async def test_query_report_task_async_use_cached_wrapped_rpc( +async def test_create_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5686,7 +5677,7 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.query_report_task + client._client._transport.create_report_task in client._client._transport._wrapped_methods ) @@ -5694,16 +5685,21 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.query_report_task + client._client._transport.create_report_task ] = mock_rpc request = {} - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5711,9 +5707,9 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_query_report_task_async( +async def test_create_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.QueryReportTaskRequest, + request_type=analytics_data_api.CreateReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5726,49 +5722,46 @@ async def test_query_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task(request) + response = await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_query_report_task_async_from_dict(): - await test_query_report_task_async(request_type=dict) +async def test_create_report_task_async_from_dict(): + await test_create_report_task_async(request_type=dict) -def test_query_report_task_field_headers(): +def test_create_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: - call.return_value = analytics_data_api.QueryReportTaskResponse() - client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5779,30 +5772,30 @@ def test_query_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_query_report_task_field_headers_async(): +async def test_create_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/op") ) - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5813,37 +5806,41 @@ async def test_query_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_query_report_task_flattened(): +def test_create_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.query_report_task( - name="name_value", + client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val -def test_query_report_task_flattened_error(): +def test_create_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5851,45 +5848,50 @@ def test_query_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.asyncio -async def test_query_report_task_flattened_async(): +async def test_create_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.query_report_task( - name="name_value", + response = await client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_query_report_task_flattened_error_async(): +async def test_create_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5897,20 +5899,21 @@ async def test_query_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + await client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetReportTaskRequest, + analytics_data_api.QueryReportTaskRequest, dict, ], ) -def test_get_report_task(request_type, transport: str = "grpc"): +def test_query_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5921,25 +5924,27 @@ def test_get_report_task(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask( - name="name_value", + call.return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, ) - response = client.get_report_task(request) + response = client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 -def test_get_report_task_empty_call(): +def test_query_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5948,17 +5953,19 @@ def test_get_report_task_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task() + client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() -def test_get_report_task_non_empty_request_with_auto_populated_field(): +def test_query_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5969,24 +5976,26 @@ def test_get_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_data_api.GetReportTaskRequest( + request = analytics_data_api.QueryReportTaskRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task(request=request) + client.query_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest( + assert args[0] == analytics_data_api.QueryReportTaskRequest( name="name_value", ) -def test_get_report_task_use_cached_wrapped_rpc(): +def test_query_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6000,21 +6009,23 @@ def test_get_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_report_task in client._transport._wrapped_methods + assert client._transport.query_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc + client._transport._wrapped_methods[ + client._transport.query_report_task + ] = mock_rpc request = {} - client.get_report_task(request) + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_report_task(request) + client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6022,7 +6033,7 @@ def test_get_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_report_task_empty_call_async(): +async def test_query_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -6031,21 +6042,23 @@ async def test_get_report_task_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task() + response = await client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() @pytest.mark.asyncio -async def test_get_report_task_async_use_cached_wrapped_rpc( +async def test_query_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6062,7 +6075,7 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_report_task + client._client._transport.query_report_task in client._client._transport._wrapped_methods ) @@ -6070,16 +6083,16 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_report_task + client._client._transport.query_report_task ] = mock_rpc request = {} - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_report_task(request) + await client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6087,9 +6100,9 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_report_task_async( +async def test_query_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetReportTaskRequest, + request_type=analytics_data_api.QueryReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6101,46 +6114,50 @@ async def test_get_report_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task(request) + response = await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 @pytest.mark.asyncio -async def test_get_report_task_async_from_dict(): - await test_get_report_task_async(request_type=dict) +async def test_query_report_task_async_from_dict(): + await test_query_report_task_async(request_type=dict) -def test_get_report_task_field_headers(): +def test_query_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: - call.return_value = analytics_data_api.ReportTask() - client.get_report_task(request) + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: + call.return_value = analytics_data_api.QueryReportTaskResponse() + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6156,23 +6173,25 @@ def test_get_report_task_field_headers(): @pytest.mark.asyncio -async def test_get_report_task_field_headers_async(): +async def test_query_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6187,18 +6206,20 @@ async def test_get_report_task_field_headers_async(): ) in kw["metadata"] -def test_get_report_task_flattened(): +def test_query_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
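# The field-header tests above check one contract: any request field that is bound
# into the HTTP/1.1 URI must also be echoed back as an "x-goog-request-params"
# entry in the call metadata. The assertion pattern itself is plain unittest.mock;
# the sketch below is an illustrative stand-in (the dict request and fake_stub are
# made-up names, not the generated AlphaAnalyticsData fixtures).
from unittest import mock

fake_stub = mock.Mock(return_value=None)  # stands in for the mocked gRPC stub method
# The routing header value is derived from the URI-bound field (here, request.name).
metadata = (("x-goog-request-params", "name=name_value"),)
fake_stub({"name": "name_value"}, metadata=metadata)

# Same membership assertion the generated tests perform on kw["metadata"].
_, kwargs = fake_stub.call_args
assert ("x-goog-request-params", "name=name_value") in kwargs["metadata"]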
- client.get_report_task( + client.query_report_task( name="name_value", ) @@ -6211,7 +6232,7 @@ def test_get_report_task_flattened(): assert arg == mock_val -def test_get_report_task_flattened_error(): +def test_query_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6219,29 +6240,31 @@ def test_get_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_report_task_flattened_async(): +async def test_query_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_report_task( + response = await client.query_report_task( name="name_value", ) @@ -6255,7 +6278,7 @@ async def test_get_report_task_flattened_async(): @pytest.mark.asyncio -async def test_get_report_task_flattened_error_async(): +async def test_query_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6263,8 +6286,8 @@ async def test_get_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + await client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @@ -6272,11 +6295,11 @@ async def test_get_report_task_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListReportTasksRequest, + analytics_data_api.GetReportTaskRequest, dict, ], ) -def test_list_report_tasks(request_type, transport: str = "grpc"): +def test_get_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6287,27 +6310,25 @@ def test_list_report_tasks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + call.return_value = analytics_data_api.ReportTask( + name="name_value", ) - response = client.list_report_tasks(request) + response = client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListReportTasksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" -def test_list_report_tasks_empty_call(): +def test_get_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -6316,19 +6337,17 @@ def test_list_report_tasks_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_report_tasks() + client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() -def test_list_report_tasks_non_empty_request_with_auto_populated_field(): +def test_get_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -6339,28 +6358,24 @@ def test_list_report_tasks_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_data_api.GetReportTaskRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks(request=request) + client.get_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_data_api.GetReportTaskRequest( + name="name_value", ) -def test_list_report_tasks_use_cached_wrapped_rpc(): +def test_get_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6374,23 +6389,21 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_report_tasks in client._transport._wrapped_methods + assert client._transport.get_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_report_tasks - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc request = {} - client.list_report_tasks(request) + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_report_tasks(request) + client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6398,7 +6411,7 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_report_tasks_empty_call_async(): +async def test_get_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -6407,23 +6420,21 @@ async def test_list_report_tasks_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. 
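# The *_use_cached_wrapped_rpc tests above rely on a single property: wrapping an
# RPC (retry/timeout handling) happens once at client construction, and later calls
# reuse the cached wrapper instead of re-wrapping. A dependency-free sketch of that
# caching idea, assuming made-up names (CachingClient / expensive_wrap are
# illustrative, not the GAPIC internals or _prep_wrapped_messages itself):
from unittest import mock

def expensive_wrap(fn):
    # Stand-in for the one-time method wrapping step.
    return lambda *args, **kwargs: fn(*args, **kwargs)

class CachingClient:
    def __init__(self, methods):
        # Wrap every RPC exactly once, at construction time.
        self._wrapped = {name: expensive_wrap(fn) for name, fn in methods.items()}

    def call(self, name, request):
        # Subsequent calls only look up the cached wrapper.
        return self._wrapped[name](request)

with mock.patch(__name__ + ".expensive_wrap", side_effect=expensive_wrap) as wrapper:
    client = CachingClient({"get_report_task": lambda request: "ok"})
    assert wrapper.call_count == 1      # wrapped during construction
    client.call("get_report_task", {})
    client.call("get_report_task", {})
    assert wrapper.call_count == 1      # no re-wrapping on later calls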
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks() + response = await client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() @pytest.mark.asyncio -async def test_list_report_tasks_async_use_cached_wrapped_rpc( +async def test_get_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6440,7 +6451,7 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_report_tasks + client._client._transport.get_report_task in client._client._transport._wrapped_methods ) @@ -6448,16 +6459,16 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_report_tasks + client._client._transport.get_report_task ] = mock_rpc request = {} - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6465,9 +6476,9 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_report_tasks_async( +async def test_get_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.ListReportTasksRequest, + request_type=analytics_data_api.GetReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6479,50 +6490,46 @@ async def test_list_report_tasks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks(request) + response = await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReportTasksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_report_tasks_async_from_dict(): - await test_list_report_tasks_async(request_type=dict) +async def test_get_report_task_async_from_dict(): + await test_get_report_task_async(request_type=dict) -def test_list_report_tasks_field_headers(): +def test_get_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - call.return_value = analytics_data_api.ListReportTasksResponse() - client.list_report_tasks(request) + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + call.return_value = analytics_data_api.ReportTask() + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6533,30 +6540,28 @@ def test_list_report_tasks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_report_tasks_field_headers_async(): +async def test_get_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6567,37 +6572,35 @@ async def test_list_report_tasks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_report_tasks_flattened(): +def test_get_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_report_tasks( - parent="parent_value", + client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_report_tasks_flattened_error(): +def test_get_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6605,45 +6608,43 @@ def test_list_report_tasks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_report_tasks_flattened_async(): +async def test_get_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_report_tasks( - parent="parent_value", + response = await client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_report_tasks_flattened_error_async(): +async def test_get_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6651,111 +6652,499 @@ async def test_list_report_tasks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + await client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) -def test_list_report_tasks_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.ListReportTasksRequest, + dict, + ], +) +def test_list_report_tasks(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_report_tasks), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", ) - pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + response = client.list_report_tasks(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == "next_page_token_value" -def test_list_report_tasks_pages(transport_name: str = "grpc"): +def test_list_report_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_report_tasks), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_report_tasks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - + client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +def test_list_report_tasks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_report_tasks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_report_tasks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_report_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_report_tasks + ] = mock_rpc + request = {} + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_report_tasks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_report_tasks + ] = mock_rpc + + request = {} + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.ListReportTasksRequest, +): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListReportTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_from_dict(): + await test_list_report_tasks_async(request_type=dict) + + +def test_list_report_tasks_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = analytics_data_api.ListReportTasksResponse() + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_report_tasks_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_report_tasks_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_report_tasks_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +def test_list_report_tasks_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + + +def test_list_report_tasks_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + pages = list(client.list_report_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.asyncio async def test_list_report_tasks_async_pager(): client = AlphaAnalyticsDataAsyncClient( @@ -6859,47 +7248,293 @@ async def test_list_report_tasks_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.RunFunnelReportRequest, - dict, - ], -) -def test_run_funnel_report_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunFunnelReportRequest, + dict, + ], +) +def test_run_funnel_report_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunFunnelReportResponse( + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_funnel_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunFunnelReportResponse) + assert response.kind == "kind_value" + + +def test_run_funnel_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_funnel_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
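# The pager tests above drive the client through several pages by giving the mocked
# stub a side_effect sequence, with a trailing RuntimeError acting as a guard against
# over-fetching. The sequencing behaviour itself is plain unittest.mock; a small
# self-contained sketch (dict pages and the manual loop are illustrative stand-ins
# for ListReportTasksResponse and the generated pager):
from unittest import mock

list_rpc = mock.Mock(side_effect=[
    {"report_tasks": ["t1", "t2", "t3"], "next_page_token": "abc"},
    {"report_tasks": [], "next_page_token": "def"},
    {"report_tasks": ["t4"], "next_page_token": "ghi"},
    {"report_tasks": ["t5", "t6"], "next_page_token": ""},
    RuntimeError,   # guard: a fifth fetch would indicate a paging bug
])

results, token = [], None
while True:
    page = list_rpc(page_token=token)
    results.extend(page["report_tasks"])
    token = page["next_page_token"]
    if not token:
        break

assert results == ["t1", "t2", "t3", "t4", "t5", "t6"]
assert list_rpc.call_count == 4   # the RuntimeError sentinel is never reached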
+ ) + client._transport._wrapped_methods[ + client._transport.run_funnel_report + ] = mock_rpc + + request = {} + client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_funnel_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_funnel_report_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_run_funnel_report" + ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunFunnelReportRequest.pb( + analytics_data_api.RunFunnelReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( + analytics_data_api.RunFunnelReportResponse() + ) + + request = analytics_data_api.RunFunnelReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunFunnelReportResponse() + + client.run_funnel_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_funnel_report_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.RunFunnelReportRequest +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
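# The REST interceptor test above only verifies the hook contract around the
# transcoded HTTP call: the pre-hook may rewrite (request, metadata) before the
# request is sent, and the post-hook may rewrite the decoded response before it is
# returned, each firing exactly once. A stripped-down sketch of that contract
# (Interceptor, RecordingInterceptor and call_rpc are illustrative names, not the
# generated AlphaAnalyticsDataRestInterceptor or transport):
class Interceptor:
    def pre_run_funnel_report(self, request, metadata):
        return request, metadata          # may modify both before sending

    def post_run_funnel_report(self, response):
        return response                   # may modify the decoded response

def call_rpc(interceptor, request, metadata):
    request, metadata = interceptor.pre_run_funnel_report(request, metadata)
    response = {"kind": "kind_value"}     # stands in for the HTTP round trip
    return interceptor.post_run_funnel_report(response)

class RecordingInterceptor(Interceptor):
    def __init__(self):
        self.pre_calls = 0
        self.post_calls = 0
    def pre_run_funnel_report(self, request, metadata):
        self.pre_calls += 1
        return super().pre_run_funnel_report(request, metadata)
    def post_run_funnel_report(self, response):
        self.post_calls += 1
        return super().post_run_funnel_report(response)

interceptor = RecordingInterceptor()
response = call_rpc(interceptor, request={}, metadata=[("key", "val")])
assert response == {"kind": "kind_value"}
assert interceptor.pre_calls == 1 and interceptor.post_calls == 1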
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_funnel_report(request) + + +def test_run_funnel_report_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.CreateAudienceListRequest, + dict, + ], +) +def test_create_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request_init["audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "state": 1, + "begin_creating_time": {"seconds": 751, "nanos": 543}, + "creation_quota_tokens_charged": 3070, + "row_count": 992, + "error_message": "error_message_value", + "percentage_completed": 0.2106, + "recurring_audience_list": "recurring_audience_list_value", + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ + "audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_list"][field])): + del request_init["audience_list"][field][i][subfield] + else: + del request_init["audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RunFunnelReportResponse( - kind="kind_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.run_funnel_report(request) + response = client.create_audience_list(request) # Establish that the response is the type that we expect. 
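# The pruning loop above exists because the hard-coded sample request reflects the
# schema available at generation time, while an older runtime copy of the message may
# lack some of those subfields; any such subfield is deleted before the request type
# is constructed. The core of that idea, reduced to plain dictionaries (the field
# names and the runtime_fields set below are made up for illustration):
runtime_fields = {("webhook_notification", "uri"),
                  ("dimensions", "dimension_name")}   # older runtime: no channel_token

sample = {
    "webhook_notification": {"uri": "uri_value", "channel_token": "channel_token_value"},
    "dimensions": [{"dimension_name": "dimension_name_value"}],
}

for field, value in list(sample.items()):
    entries = value if isinstance(value, list) else [value]
    for entry in entries:
        if isinstance(entry, dict):
            for subfield in list(entry):
                if (field, subfield) not in runtime_fields:
                    del entry[subfield]   # drop subfields the runtime message cannot accept

assert sample == {
    "webhook_notification": {"uri": "uri_value"},
    "dimensions": [{"dimension_name": "dimension_name_value"}],
}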
- assert isinstance(response, analytics_data_api.RunFunnelReportResponse) - assert response.kind == "kind_value" + assert response.operation.name == "operations/spam" -def test_run_funnel_report_rest_use_cached_wrapped_rpc(): +def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6913,7 +7548,9 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.run_funnel_report in client._transport._wrapped_methods + assert ( + client._transport.create_audience_list in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -6921,24 +7558,117 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.run_funnel_report + client._transport.create_audience_list ] = mock_rpc request = {} - client.run_funnel_report(request) + client.create_audience_list(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.run_funnel_report(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_create_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateAudienceListRequest, +): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_audience_list(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "audienceList", + ) + ) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_funnel_report_rest_interceptors(null_interceptor): +def test_create_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6951,14 +7681,16 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_run_funnel_report" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.RunFunnelReportRequest.pb( - analytics_data_api.RunFunnelReportRequest() + pb_message = analytics_data_api.CreateAudienceListRequest.pb( + analytics_data_api.CreateAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -6970,55 +7702,111 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( - analytics_data_api.RunFunnelReportResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = analytics_data_api.RunFunnelReportRequest() + request = analytics_data_api.CreateAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RunFunnelReportResponse() + post.return_value = operations_pb2.Operation() + + client.create_audience_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +): + client = AlphaAnalyticsDataClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_audience_list(request) + + +def test_create_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.run_funnel_report( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + client.create_audience_list(**mock_args) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + args[1], + ) -def test_run_funnel_report_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.RunFunnelReportRequest -): +def test_create_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_funnel_report(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_audience_list( + analytics_data_api.CreateAudienceListRequest(), + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) -def test_run_funnel_report_rest_error(): +def test_create_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7027,125 +7815,44 @@ def test_run_funnel_report_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateAudienceListRequest, + analytics_data_api.QueryAudienceListRequest, dict, ], ) -def test_create_audience_list_rest(request_type): +def test_query_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "state": 1, - "begin_creating_time": {"seconds": 751, "nanos": 543}, - "creation_quota_tokens_charged": 3070, - "row_count": 992, - "error_message": "error_message_value", - "percentage_completed": 0.2106, - "recurring_audience_list": "recurring_audience_list_value", - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ - "audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["audience_list"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["audience_list"][field])): - del request_init["audience_list"][field][i][subfield] - else: - del request_init["audience_list"][field][subfield] + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse( + row_count=992, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 -def test_create_audience_list_rest_use_cached_wrapped_rpc(): +def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7160,7 +7867,7 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_audience_list in client._transport._wrapped_methods + client._transport.query_audience_list in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7169,33 +7876,29 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_audience_list + client._transport.query_audience_list ] = mock_rpc request = {} - client.create_audience_list(request) + client.query_audience_list(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_audience_list(request) + client.query_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateAudienceListRequest, +def test_query_audience_list_rest_required_fields( + request_type=analytics_data_api.QueryAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7206,21 +7909,21 @@ def test_create_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7229,7 +7932,7 @@ def test_create_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7249,37 +7952,32 @@ def test_create_audience_list_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_audience_list_rest_unset_required_fields(): +def test_query_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "audienceList", - ) - ) - ) + unset_fields = transport.query_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_audience_list_rest_interceptors(null_interceptor): +def test_query_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7292,16 +7990,14 @@ def test_create_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateAudienceListRequest.pb( - analytics_data_api.CreateAudienceListRequest() + pb_message = analytics_data_api.QueryAudienceListRequest.pb( + analytics_data_api.QueryAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7313,19 +8009,21 @@ def test_create_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + analytics_data_api.QueryAudienceListResponse.to_json( + analytics_data_api.QueryAudienceListResponse() + ) ) - request = analytics_data_api.CreateAudienceListRequest() + request = analytics_data_api.QueryAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = analytics_data_api.QueryAudienceListResponse() - client.create_audience_list( + 
client.query_audience_list( request, metadata=[ ("key", "val"), @@ -7337,8 +8035,8 @@ def test_create_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +def test_query_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7346,7 +8044,7 @@ def test_create_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7358,10 +8056,10 @@ def test_create_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_audience_list(request) + client.query_audience_list(request) -def test_create_audience_list_rest_flattened(): +def test_query_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7370,38 +8068,40 @@ def test_create_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/audienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_audience_list(**mock_args) + client.query_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + % client.transport._host, args[1], ) -def test_create_audience_list_rest_flattened_error(transport: str = "rest"): +def test_query_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7410,14 +8110,13 @@ def test_create_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_audience_list( - analytics_data_api.CreateAudienceListRequest(), - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), + name="name_value", ) -def test_create_audience_list_rest_error(): +def test_query_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7426,11 +8125,11 @@ def test_create_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryAudienceListRequest, + analytics_data_api.SheetExportAudienceListRequest, dict, ], ) -def test_query_audience_list_rest(request_type): +def test_sheet_export_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7443,7 +8142,9 @@ def test_query_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.QueryAudienceListResponse( + return_value = analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri="spreadsheet_uri_value", + spreadsheet_id="spreadsheet_id_value", row_count=992, ) @@ -7451,19 +8152,23 @@ def test_query_audience_list_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) + assert response.spreadsheet_uri == "spreadsheet_uri_value" + assert response.spreadsheet_id == "spreadsheet_id_value" assert response.row_count == 992 -def test_query_audience_list_rest_use_cached_wrapped_rpc(): +def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7478,7 +8183,8 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.query_audience_list in client._transport._wrapped_methods + client._transport.sheet_export_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7487,24 +8193,24 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_audience_list + client._transport.sheet_export_audience_list ] = mock_rpc request = {} - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_query_audience_list_rest_required_fields( - request_type=analytics_data_api.QueryAudienceListRequest, +def test_sheet_export_audience_list_rest_required_fields( + request_type=analytics_data_api.SheetExportAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7520,7 +8226,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7529,7 +8235,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7543,7 +8249,7 @@ def test_query_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7565,30 +8271,32 @@ def test_query_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_query_audience_list_rest_unset_required_fields(): +def test_sheet_export_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.query_audience_list._get_unset_required_fields({}) + unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_audience_list_rest_interceptors(null_interceptor): +def test_sheet_export_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7601,14 +8309,14 @@ def test_query_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.QueryAudienceListRequest.pb( - analytics_data_api.QueryAudienceListRequest() + pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( + analytics_data_api.SheetExportAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7621,20 +8329,20 @@ def test_query_audience_list_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - analytics_data_api.QueryAudienceListResponse.to_json( - analytics_data_api.QueryAudienceListResponse() + analytics_data_api.SheetExportAudienceListResponse.to_json( + analytics_data_api.SheetExportAudienceListResponse() ) ) - request = analytics_data_api.QueryAudienceListRequest() + request = analytics_data_api.SheetExportAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.QueryAudienceListResponse() + post.return_value = analytics_data_api.SheetExportAudienceListResponse() - client.query_audience_list( + client.sheet_export_audience_list( request, metadata=[ ("key", "val"), @@ -7646,8 +8354,9 @@ def test_query_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_query_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest +def test_sheet_export_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.SheetExportAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7667,10 +8376,10 @@ def test_query_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.query_audience_list(request) + client.sheet_export_audience_list(request) -def test_query_audience_list_rest_flattened(): +def test_sheet_export_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7679,7 +8388,7 @@ def test_query_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -7694,25 +8403,27 @@ def test_query_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.query_audience_list(**mock_args) + client.sheet_export_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" % client.transport._host, args[1], ) -def test_query_audience_list_rest_flattened_error(transport: str = "rest"): +def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7721,13 +8432,13 @@ def test_query_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_audience_list( - analytics_data_api.QueryAudienceListRequest(), + client.sheet_export_audience_list( + analytics_data_api.SheetExportAudienceListRequest(), name="name_value", ) -def test_query_audience_list_rest_error(): +def test_sheet_export_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7736,11 +8447,11 @@ def test_query_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.SheetExportAudienceListRequest, + analytics_data_api.GetAudienceListRequest, dict, ], ) -def test_sheet_export_audience_list_rest(request_type): +def test_get_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7753,33 +8464,43 @@ def test_sheet_export_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", + return_value = analytics_data_api.AudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + recurring_audience_list="recurring_audience_list_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) - assert response.spreadsheet_uri == "spreadsheet_uri_value" - assert response.spreadsheet_id == "spreadsheet_id_value" + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == "recurring_audience_list_value" -def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7793,10 +8514,7 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.sheet_export_audience_list - in client._transport._wrapped_methods - ) + assert client._transport.get_audience_list in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -7804,24 +8522,24 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.sheet_export_audience_list + client._transport.get_audience_list ] = mock_rpc request = {} - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_sheet_export_audience_list_rest_required_fields( - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_required_fields( + request_type=analytics_data_api.GetAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7837,7 +8555,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7846,7 +8564,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7860,7 +8578,7 @@ def test_sheet_export_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7872,42 +8590,39 @@ def test_sheet_export_audience_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_sheet_export_audience_list_rest_unset_required_fields(): +def test_get_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) + unset_fields = transport.get_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sheet_export_audience_list_rest_interceptors(null_interceptor): +def test_get_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7920,14 +8635,14 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( - analytics_data_api.SheetExportAudienceListRequest() + pb_message = analytics_data_api.GetAudienceListRequest.pb( + analytics_data_api.GetAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7939,21 +8654,19 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.SheetExportAudienceListResponse.to_json( - analytics_data_api.SheetExportAudienceListResponse() - ) + req.return_value._content = analytics_data_api.AudienceList.to_json( + analytics_data_api.AudienceList() ) - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.SheetExportAudienceListResponse() + post.return_value = analytics_data_api.AudienceList() - client.sheet_export_audience_list( + client.get_audience_list( request, metadata=[ ("key", "val"), @@ -7965,9 +8678,8 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_sheet_export_audience_list_rest_bad_request( - transport: str = "rest", - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.GetAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7987,10 +8699,10 @@ def test_sheet_export_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.sheet_export_audience_list(request) + client.get_audience_list(request) -def test_sheet_export_audience_list_rest_flattened(): +def test_get_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7999,7 +8711,7 @@ def test_sheet_export_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -8014,27 +8726,24 @@ def test_sheet_export_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.sheet_export_audience_list(**mock_args) + client.get_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" - % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, args[1], ) -def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8043,13 +8752,13 @@ def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.sheet_export_audience_list( - analytics_data_api.SheetExportAudienceListRequest(), + client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), name="name_value", ) -def test_sheet_export_audience_list_rest_error(): +def test_get_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8058,60 +8767,44 @@ def test_sheet_export_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetAudienceListRequest, + analytics_data_api.ListAudienceListsRequest, dict, ], ) -def test_get_audience_list_rest(request_type): +def test_list_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.AudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - state=analytics_data_api.AudienceList.State.CREATING, - creation_quota_tokens_charged=3070, - row_count=992, - error_message="error_message_value", - percentage_completed=0.2106, - recurring_audience_list="recurring_audience_list_value", + return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.AudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.state == analytics_data_api.AudienceList.State.CREATING - assert response.creation_quota_tokens_charged == 3070 - assert response.row_count == 992 - assert response.error_message == "error_message_value" - assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) - assert response.recurring_audience_list == "recurring_audience_list_value" + assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8125,7 +8818,9 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_audience_list in client._transport._wrapped_methods + assert ( + client._transport.list_audience_lists in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -8133,29 +8828,29 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_audience_list + client._transport.list_audience_lists ] = mock_rpc request = {} - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_audience_list_rest_required_fields( - request_type=analytics_data_api.GetAudienceListRequest, +def test_list_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8166,21 +8861,28 @@ def test_get_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8189,7 +8891,7 @@ def test_get_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8210,30 +8912,38 @@ def test_get_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_audience_list_rest_unset_required_fields(): +def test_list_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_audience_lists._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_audience_list_rest_interceptors(null_interceptor): +def test_list_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8246,14 +8956,14 @@ def test_get_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetAudienceListRequest.pb( - analytics_data_api.GetAudienceListRequest() + pb_message = analytics_data_api.ListAudienceListsRequest.pb( + analytics_data_api.ListAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -8265,19 +8975,21 @@ def test_get_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.AudienceList.to_json( - analytics_data_api.AudienceList() + req.return_value._content = ( + analytics_data_api.ListAudienceListsResponse.to_json( + analytics_data_api.ListAudienceListsResponse() + ) ) - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.AudienceList() + post.return_value = analytics_data_api.ListAudienceListsResponse() - client.get_audience_list( + client.list_audience_lists( request, metadata=[ ("key", "val"), @@ -8289,8 +9001,8 @@ def test_get_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_audience_list_rest_bad_request( - transport: str = "rest", 
request_type=analytics_data_api.GetAudienceListRequest +def test_list_audience_lists_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.ListAudienceListsRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8298,7 +9010,7 @@ def test_get_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8310,10 +9022,10 @@ def test_get_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_audience_list(request) + client.list_audience_lists(request) -def test_get_audience_list_rest_flattened(): +def test_list_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8322,14 +9034,14 @@ def test_get_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/audienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -8337,24 +9049,24 @@ def test_get_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_audience_list(**mock_args) + client.list_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, args[1], ) -def test_get_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8363,26 +9075,83 @@ def test_get_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_audience_list( - analytics_data_api.GetAudienceListRequest(), - name="name_value", + client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent="parent_value", ) -def test_get_audience_list_rest_error(): +def test_list_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) + + pages = list(client.list_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListAudienceListsRequest, + analytics_data_api.CreateRecurringAudienceListRequest, dict, ], ) -def test_list_audience_lists_rest(request_type): +def test_create_recurring_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8390,32 +9159,123 @@ def test_list_audience_lists_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} + request_init["recurring_audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "active_days_remaining": 2213, + "audience_lists": ["audience_lists_value1", "audience_lists_value2"], + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ + "recurring_audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "recurring_audience_list" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["recurring_audience_list"][field])): + del request_init["recurring_audience_list"][field][i][subfield] + else: + del request_init["recurring_audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
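The field-pruning helper above works by enumerating message fields the same way for proto-plus wrappers and raw protobuf classes. As a hedged aside mirroring the generator's own `hasattr(..., "DESCRIPTOR")` check, a minimal sketch of that distinction, assuming the `google.analytics.data_v1alpha.types` import path used by these generated tests:

# Illustrative sketch only: introspecting fields of a proto-plus vs protobuf type.
from google.analytics.data_v1alpha.types import analytics_data_api  # assumed import path

msg_type = analytics_data_api.RecurringAudienceList

if not hasattr(msg_type, "DESCRIPTOR"):
    # proto-plus wrappers expose their schema through `.meta.fields`,
    # the same attribute the test uses on the request type above.
    field_names = list(msg_type.meta.fields.keys())
else:
    # vanilla protobuf classes expose it through DESCRIPTOR.fields instead.
    field_names = [f.name for f in msg_type.DESCRIPTOR.fields]

print(field_names)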
- return_value = analytics_data_api.ListAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] -def test_list_audience_lists_rest_use_cached_wrapped_rpc(): +def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8430,7 +9290,8 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_audience_lists in client._transport._wrapped_methods + client._transport.create_recurring_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -8439,24 +9300,24 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_audience_lists + client._transport.create_recurring_audience_list ] = mock_rpc request = {} - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListAudienceListsRequest, +def test_create_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -8472,7 +9333,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8481,14 +9342,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8502,7 +9356,7 @@ def test_list_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8514,47 +9368,50 @@ def test_list_audience_lists_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_audience_lists_rest_unset_required_fields(): +def test_create_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_audience_lists._get_unset_required_fields({}) + unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "parent", + "recurringAudienceList", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_audience_lists_rest_interceptors(null_interceptor): +def test_create_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8567,14 +9424,16 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_recurring_audience_list", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_create_recurring_audience_list", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListAudienceListsRequest.pb( - analytics_data_api.ListAudienceListsRequest() + pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( + analytics_data_api.CreateRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -8586,21 +9445,19 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListAudienceListsResponse.to_json( - analytics_data_api.ListAudienceListsResponse() - ) + req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( + analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.CreateRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - 
post.return_value = analytics_data_api.ListAudienceListsResponse() + post.return_value = analytics_data_api.RecurringAudienceList() - client.list_audience_lists( + client.create_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -8612,8 +9469,9 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_audience_lists_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.ListAudienceListsRequest +def test_create_recurring_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8633,10 +9491,10 @@ def test_list_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_audience_lists(request) + client.create_recurring_audience_list(request) -def test_list_audience_lists_rest_flattened(): +def test_create_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8645,7 +9503,7 @@ def test_list_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -8653,6 +9511,9 @@ def test_list_audience_lists_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), ) mock_args.update(sample_request) @@ -8660,24 +9521,25 @@ def test_list_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_audience_lists(**mock_args) + client.create_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + % client.transport._host, args[1], ) -def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8686,173 +9548,36 @@ def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_audience_lists( - analytics_data_api.ListAudienceListsRequest(), + client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), parent="parent_value", - ) - - -def test_list_audience_lists_rest_pager(transport: str = "rest"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" ), ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) - - pages = list(client.list_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.CreateRecurringAudienceListRequest, - dict, - ], -) -def test_create_recurring_audience_list_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["recurring_audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "active_days_remaining": 2213, - "audience_lists": ["audience_lists_value1", "audience_lists_value2"], - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
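The flattened-call tests above establish that create_recurring_audience_list accepts `parent` plus a RecurringAudienceList body, and that mixing a request object with flattened fields raises ValueError. A hedged usage sketch of the flattened form follows; the resource names and dimension value are placeholders, and passing a dict for the message field relies on proto-plus coercion rather than anything asserted in this patch.

# Illustrative sketch only: flattened create_recurring_audience_list call.
from google.analytics.data_v1alpha import AlphaAnalyticsDataClient  # assumed import path
from google.analytics.data_v1alpha.types import analytics_data_api

client = AlphaAnalyticsDataClient()  # assumes Application Default Credentials

recurring = client.create_recurring_audience_list(
    parent="properties/123",  # placeholder property
    recurring_audience_list=analytics_data_api.RecurringAudienceList(
        audience="properties/123/audiences/456",      # placeholder audience name
        dimensions=[{"dimension_name": "deviceId"}],  # placeholder dimension
    ),
)
print(recurring.name, recurring.active_days_remaining)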
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ - "recurring_audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "recurring_audience_list" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_create_recurring_audience_list_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["recurring_audience_list"][field])): - del request_init["recurring_audience_list"][field][i][subfield] - else: - del request_init["recurring_audience_list"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetRecurringAudienceListRequest, + dict, + ], +) +def test_get_recurring_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -8875,7 +9600,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) # Establish that the response is the type that we expect. assert isinstance(response, analytics_data_api.RecurringAudienceList) @@ -8886,7 +9611,7 @@ def get_message_fields(field): assert response.audience_lists == ["audience_lists_value"] -def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8901,7 +9626,7 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list in client._transport._wrapped_methods ) @@ -8911,29 +9636,29 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list ] = mock_rpc request = {} - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateRecurringAudienceListRequest, +def test_get_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8944,21 +9669,21 @@ def test_create_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8979,10 +9704,9 @@ def 
test_create_recurring_audience_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -8995,34 +9719,24 @@ def test_create_recurring_audience_list_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_recurring_audience_list_rest_unset_required_fields(): +def test_get_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "recurringAudienceList", - ) - ) - ) + unset_fields = transport.get_recurring_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_recurring_audience_list_rest_interceptors(null_interceptor): +def test_get_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9035,16 +9749,14 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "post_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( - analytics_data_api.CreateRecurringAudienceListRequest() + pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( + analytics_data_api.GetRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -9060,7 +9772,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9068,7 +9780,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = analytics_data_api.RecurringAudienceList() - client.create_recurring_audience_list( + client.get_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -9080,9 +9792,9 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_recurring_audience_list_rest_bad_request( +def test_get_recurring_audience_list_rest_bad_request( transport: str = "rest", - 
request_type=analytics_data_api.CreateRecurringAudienceListRequest, + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9090,7 +9802,7 @@ def test_create_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9102,10 +9814,10 @@ def test_create_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) -def test_create_recurring_audience_list_rest_flattened(): +def test_get_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9117,14 +9829,11 @@ def test_create_recurring_audience_list_rest_flattened(): return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + name="name_value", ) mock_args.update(sample_request) @@ -9137,20 +9846,20 @@ def test_create_recurring_audience_list_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_recurring_audience_list(**mock_args) + client.get_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" % client.transport._host, args[1], ) -def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9159,16 +9868,13 @@ def test_create_recurring_audience_list_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_recurring_audience_list( - analytics_data_api.CreateRecurringAudienceListRequest(), - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name="name_value", ) -def test_create_recurring_audience_list_rest_error(): +def test_get_recurring_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9177,52 +9883,46 @@ def test_create_recurring_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetRecurringAudienceListRequest, + analytics_data_api.ListRecurringAudienceListsRequest, dict, ], ) -def test_get_recurring_audience_list_rest(request_type): +def test_list_recurring_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], + return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] + assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9237,7 +9937,7 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists in client._transport._wrapped_methods ) @@ -9247,29 +9947,29 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists ] = mock_rpc request = {} - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.GetRecurringAudienceListRequest, +def test_list_recurring_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9280,21 +9980,28 @@ def test_get_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9303,7 +10010,7 @@ def test_get_recurring_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9324,30 +10031,42 @@ def test_get_recurring_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_recurring_audience_list_rest_unset_required_fields(): +def test_list_recurring_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_recurring_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_recurring_audience_list_rest_interceptors(null_interceptor): +def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9360,14 +10079,16 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "post_list_recurring_audience_lists", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_list_recurring_audience_lists", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( - analytics_data_api.GetRecurringAudienceListRequest() + pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( + analytics_data_api.ListRecurringAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -9379,19 +10100,21 @@ 
def test_get_recurring_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( - analytics_data_api.RecurringAudienceList() + req.return_value._content = ( + analytics_data_api.ListRecurringAudienceListsResponse.to_json( + analytics_data_api.ListRecurringAudienceListsResponse() + ) ) - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RecurringAudienceList() + post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - client.get_recurring_audience_list( + client.list_recurring_audience_lists( request, metadata=[ ("key", "val"), @@ -9403,9 +10126,9 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_recurring_audience_list_rest_bad_request( +def test_list_recurring_audience_lists_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.GetRecurringAudienceListRequest, + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9413,7 +10136,7 @@ def test_get_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9425,10 +10148,10 @@ def test_get_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) -def test_get_recurring_audience_list_rest_flattened(): +def test_list_recurring_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9437,14 +10160,14 @@ def test_get_recurring_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
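The interceptor tests in this file patch the `pre_*`/`post_*` hooks on AlphaAnalyticsDataRestInterceptor directly. For orientation, a hedged sketch of how an application could supply its own interceptor is shown below; the `services.alpha_analytics_data.transports` module path and the use of default credentials are assumptions, while the hook shapes (pre returns `(request, metadata)`, post returns the response) follow the mocks used in these tests.

# Illustrative sketch only: a custom REST interceptor for one method.
from google.analytics.data_v1alpha import AlphaAnalyticsDataClient  # assumed import path
from google.analytics.data_v1alpha.services.alpha_analytics_data import (  # assumed path
    transports,
)


class LoggingInterceptor(transports.AlphaAnalyticsDataRestInterceptor):
    def pre_list_recurring_audience_lists(self, request, metadata):
        # Runs before transcoding, like the `pre` mock in the test above.
        print("outgoing request:", request)
        return request, metadata

    def post_list_recurring_audience_lists(self, response):
        # Runs after the response is parsed, like the `post` mock above.
        print("items in page:", len(response.recurring_audience_lists))
        return response


transport = transports.AlphaAnalyticsDataRestTransport(interceptor=LoggingInterceptor())
client = AlphaAnalyticsDataClient(transport=transport)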
- return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -9452,25 +10175,27 @@ def test_get_recurring_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_recurring_audience_list(**mock_args) + client.list_recurring_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" % client.transport._host, args[1], ) -def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9479,61 +10204,119 @@ def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_recurring_audience_list( - analytics_data_api.GetRecurringAudienceListRequest(), - name="name_value", + client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), + parent="parent_value", ) -def test_get_recurring_audience_list_rest_error(): +def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_recurring_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, analytics_data_api.RecurringAudienceList) for i in results + ) + + pages = list(client.list_recurring_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListRecurringAudienceListsRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def test_list_recurring_audience_lists_rest(request_type): +def test_get_property_quotas_snapshot_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRecurringAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9548,7 +10331,7 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot in client._transport._wrapped_methods ) @@ -9558,29 +10341,29 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_recurring_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListRecurringAudienceListsRequest, +def test_get_property_quotas_snapshot_rest_required_fields( + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9591,28 +10374,21 @@ def test_list_recurring_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9621,7 +10397,7 @@ def test_list_recurring_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9642,42 +10418,30 @@ def test_list_recurring_audience_lists_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_recurring_audience_lists_rest_unset_required_fields(): +def test_get_property_quotas_snapshot_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_property_quotas_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): +def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9691,15 +10455,14 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_list_recurring_audience_lists", + "post_get_property_quotas_snapshot", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_list_recurring_audience_lists", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_property_quotas_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( - analytics_data_api.ListRecurringAudienceListsRequest() + pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb( + analytics_data_api.GetPropertyQuotasSnapshotRequest() ) transcode.return_value = { "method": "post", @@ 
-9711,21 +10474,19 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListRecurringAudienceListsResponse.to_json( - analytics_data_api.ListRecurringAudienceListsResponse() - ) + req.return_value._content = analytics_data_api.PropertyQuotasSnapshot.to_json( + analytics_data_api.PropertyQuotasSnapshot() ) - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + post.return_value = analytics_data_api.PropertyQuotasSnapshot() - client.list_recurring_audience_lists( + client.get_property_quotas_snapshot( request, metadata=[ ("key", "val"), @@ -9737,9 +10498,9 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_recurring_audience_lists_rest_bad_request( +def test_get_property_quotas_snapshot_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.ListRecurringAudienceListsRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9747,7 +10508,7 @@ def test_list_recurring_audience_lists_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9759,10 +10520,10 @@ def test_list_recurring_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) -def test_list_recurring_audience_lists_rest_flattened(): +def test_get_property_quotas_snapshot_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9771,14 +10532,14 @@ def test_list_recurring_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/propertyQuotasSnapshot"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -9786,27 +10547,25 @@ def test_list_recurring_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_recurring_audience_lists(**mock_args) + client.get_property_quotas_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/propertyQuotasSnapshot}" % client.transport._host, args[1], ) -def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9815,77 +10574,17 @@ def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_recurring_audience_lists( - analytics_data_api.ListRecurringAudienceListsRequest(), - parent="parent_value", + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) -def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_error(): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_recurring_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, analytics_data_api.RecurringAudienceList) for i in results - ) - - pages = list(client.list_recurring_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", @@ -11498,6 +12197,7 @@ def test_alpha_analytics_data_base_transport(): "create_recurring_audience_list", "get_recurring_audience_list", "list_recurring_audience_lists", + "get_property_quotas_snapshot", "create_report_task", "query_report_task", "get_report_task", @@ -11835,6 +12535,9 @@ def test_alpha_analytics_data_client_transport_session_collision(transport_name) session1 = client1.transport.list_recurring_audience_lists._session session2 = client2.transport.list_recurring_audience_lists._session assert session1 != session2 + session1 = client1.transport.get_property_quotas_snapshot._session + session2 = client2.transport.get_property_quotas_snapshot._session + assert session1 != session2 session1 = client1.transport.create_report_task._session session2 = client2.transport.create_report_task._session assert session1 != session2 @@ -12032,9 +12735,29 @@ def test_parse_audience_list_path(): assert expected == actual -def test_recurring_audience_list_path(): +def test_property_quotas_snapshot_path(): property = "oyster" - recurring_audience_list = "nudibranch" + expected = "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + actual = AlphaAnalyticsDataClient.property_quotas_snapshot_path(property) + assert expected == actual + + +def test_parse_property_quotas_snapshot_path(): + expected = { + "property": "nudibranch", + } + path = AlphaAnalyticsDataClient.property_quotas_snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path(path) + assert expected == actual + + +def test_recurring_audience_list_path(): + property = "cuttlefish" + recurring_audience_list = "mussel" expected = ( "properties/{property}/recurringAudienceLists/{recurring_audience_list}".format( property=property, @@ -12049,8 +12772,8 @@ def test_recurring_audience_list_path(): def test_parse_recurring_audience_list_path(): expected = { - "property": "cuttlefish", - "recurring_audience_list": "mussel", + "property": "winkle", + "recurring_audience_list": "nautilus", } path = AlphaAnalyticsDataClient.recurring_audience_list_path(**expected) @@ -12060,8 +12783,8 @@ def test_parse_recurring_audience_list_path(): def test_report_task_path(): - property = "winkle" - report_task = "nautilus" + property = "scallop" + report_task = "abalone" expected = "properties/{property}/reportTasks/{report_task}".format( property=property, report_task=report_task, @@ -12072,8 +12795,8 @@ def test_report_task_path(): def test_parse_report_task_path(): expected = { - "property": "scallop", - "report_task": "abalone", + "property": "squid", + "report_task": "clam", } path = AlphaAnalyticsDataClient.report_task_path(**expected) @@ -12083,7 +12806,7 @@ def test_parse_report_task_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -12093,7 +12816,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "octopus", } path = AlphaAnalyticsDataClient.common_billing_account_path(**expected) @@ -12103,7 +12826,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -12113,7 +12836,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nudibranch", } path = AlphaAnalyticsDataClient.common_folder_path(**expected) @@ -12123,7 +12846,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -12133,7 +12856,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "mussel", } path = AlphaAnalyticsDataClient.common_organization_path(**expected) @@ -12143,7 +12866,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -12153,7 +12876,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nautilus", } path = AlphaAnalyticsDataClient.common_project_path(**expected) @@ -12163,8 +12886,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -12175,8 +12898,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "squid", + "location": "clam", } path = 
AlphaAnalyticsDataClient.common_location_path(**expected)


From 2cc1550492a2b78ed7240aab84a8449de5e5afa2 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 20 Sep 2024 11:37:31 -0400
Subject: [PATCH 077/108] feat: [google-maps-places] add `routing_parameters` to SearchNearbyRequest and SearchTextRequest (#13092)

BEGIN_COMMIT_OVERRIDE
feat: action for publishing data profiles to SecOps (formerly known as Chronicle)
feat: action for publishing data profiles to Security Command Center
feat: discovery configs for AWS S3 buckets
docs: small improvements and clarifications
END_COMMIT_OVERRIDE

- [ ] Regenerate this pull request now.

feat: add `search_along_route_parameters` to SearchTextRequest
feat: add `routing_summaries` to SearchNearbyResponse and SearchTextResponse
docs: A comment for field `contextual_contents` in message `.google.maps.places.v1.SearchTextResponse` is changed to be more assertive
docs: A comment for field `open_now` in message `.google.maps.places.v1.Place` is changed to clarify what it means with new-since-previous-comment current and secondary opening hours fields

Introduces search along route and trip time features.

PiperOrigin-RevId: 675760040

Source-Link: https://github.com/googleapis/googleapis/commit/42219196a8f02e2e17198ad88f4369324dfe1f09
Source-Link: https://github.com/googleapis/googleapis-gen/commit/9caa94e6a392aed9fcca81cb8091ffe9cdfce033
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcGxhY2VzLy5Pd2xCb3QueWFtbCIsImgiOiI5Y2FhOTRlNmEzOTJhZWQ5ZmNjYTgxY2I4MDkxZmZlOWNkZmNlMDMzIn0=

---------

Co-authored-by: Owl Bot
Co-authored-by: ohmayr
---
 .../google/maps/places/__init__.py            | 12 ++
 .../google/maps/places_v1/__init__.py         | 12 ++
 .../places_v1/services/places/async_client.py |  1 +
 .../maps/places_v1/services/places/client.py  |  1 +
 .../google/maps/places_v1/types/__init__.py   | 12 ++
 .../google/maps/places_v1/types/place.py      | 10 +-
 .../maps/places_v1/types/places_service.py    | 135 +++++++++++++++++-
 .../google/maps/places_v1/types/polyline.py   | 61 ++++++++
 .../maps/places_v1/types/route_modifiers.py   | 75 ++++++++++
 .../places_v1/types/routing_preference.py     | 70 +++++++++
 .../maps/places_v1/types/routing_summary.py   | 81 +++++++++++
 .../maps/places_v1/types/travel_mode.py       | 62 ++++++++
 .../scripts/fixup_places_v1_keywords.py       | 4 +-
 .../tests/unit/gapic/places_v1/test_places.py | 5 +
 14 files changed, 535 insertions(+), 6 deletions(-)
 create mode 100644 packages/google-maps-places/google/maps/places_v1/types/polyline.py
 create mode 100644 packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py
 create mode 100644 packages/google-maps-places/google/maps/places_v1/types/routing_preference.py
 create mode 100644 packages/google-maps-places/google/maps/places_v1/types/routing_summary.py
 create mode 100644 packages/google-maps-places/google/maps/places_v1/types/travel_mode.py

diff --git a/packages/google-maps-places/google/maps/places/__init__.py b/packages/google-maps-places/google/maps/places/__init__.py
index ba2545071c8b..28c346d421a8 100644
--- a/packages/google-maps-places/google/maps/places/__init__.py
+++ b/packages/google-maps-places/google/maps/places/__init__.py
@@ -34,13 +34,19 @@
     GetPhotoMediaRequest,
     GetPlaceRequest,
     PhotoMedia,
+    RoutingParameters,
     SearchNearbyRequest,
     SearchNearbyResponse,
     SearchTextRequest,
     SearchTextResponse,
 )
+from google.maps.places_v1.types.polyline import Polyline
 from google.maps.places_v1.types.reference import References
 from google.maps.places_v1.types.review
import Review +from google.maps.places_v1.types.route_modifiers import RouteModifiers +from google.maps.places_v1.types.routing_preference import RoutingPreference +from google.maps.places_v1.types.routing_summary import RoutingSummary +from google.maps.places_v1.types.travel_mode import TravelMode __all__ = ( "PlacesClient", @@ -60,10 +66,16 @@ "GetPhotoMediaRequest", "GetPlaceRequest", "PhotoMedia", + "RoutingParameters", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "Polyline", "References", "Review", + "RouteModifiers", + "RoutingPreference", + "RoutingSummary", + "TravelMode", ) diff --git a/packages/google-maps-places/google/maps/places_v1/__init__.py b/packages/google-maps-places/google/maps/places_v1/__init__.py index bb2075cda07a..106940dbdfc2 100644 --- a/packages/google-maps-places/google/maps/places_v1/__init__.py +++ b/packages/google-maps-places/google/maps/places_v1/__init__.py @@ -33,13 +33,19 @@ GetPhotoMediaRequest, GetPlaceRequest, PhotoMedia, + RoutingParameters, SearchNearbyRequest, SearchNearbyResponse, SearchTextRequest, SearchTextResponse, ) +from .types.polyline import Polyline from .types.reference import References from .types.review import Review +from .types.route_modifiers import RouteModifiers +from .types.routing_preference import RoutingPreference +from .types.routing_summary import RoutingSummary +from .types.travel_mode import TravelMode __all__ = ( "PlacesAsyncClient", @@ -58,11 +64,17 @@ "PhotoMedia", "Place", "PlacesClient", + "Polyline", "PriceLevel", "References", "Review", + "RouteModifiers", + "RoutingParameters", + "RoutingPreference", + "RoutingSummary", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "TravelMode", ) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py index 0e27906f0dc2..46a0d6df67af 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py @@ -54,6 +54,7 @@ place, places_service, review, + routing_summary, ) from .client import PlacesClient diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/client.py b/packages/google-maps-places/google/maps/places_v1/services/places/client.py index bbf53396b61b..7db691a4a89b 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/client.py @@ -60,6 +60,7 @@ place, places_service, review, + routing_summary, ) from .transports.base import DEFAULT_CLIENT_INFO, PlacesTransport diff --git a/packages/google-maps-places/google/maps/places_v1/types/__init__.py b/packages/google-maps-places/google/maps/places_v1/types/__init__.py index 99b294c1dc25..5b1baab78073 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/__init__.py +++ b/packages/google-maps-places/google/maps/places_v1/types/__init__.py @@ -27,13 +27,19 @@ GetPhotoMediaRequest, GetPlaceRequest, PhotoMedia, + RoutingParameters, SearchNearbyRequest, SearchNearbyResponse, SearchTextRequest, SearchTextResponse, ) +from .polyline import Polyline from .reference import References from .review import Review +from .route_modifiers import RouteModifiers +from .routing_preference import RoutingPreference +from .routing_summary import RoutingSummary +from .travel_mode 
import TravelMode __all__ = ( "AuthorAttribution", @@ -51,10 +57,16 @@ "GetPhotoMediaRequest", "GetPlaceRequest", "PhotoMedia", + "RoutingParameters", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", "SearchTextResponse", + "Polyline", "References", "Review", + "RouteModifiers", + "RoutingPreference", + "RoutingSummary", + "TravelMode", ) diff --git a/packages/google-maps-places/google/maps/places_v1/types/place.py b/packages/google-maps-places/google/maps/places_v1/types/place.py index 99c9e8f693e9..0ce1df663c1b 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/place.py +++ b/packages/google-maps-places/google/maps/places_v1/types/place.py @@ -439,9 +439,13 @@ class OpeningHours(proto.Message): Attributes: open_now (bool): - Is this place open right now? Always present - unless we lack time-of-day or timezone data for - these opening hours. + Whether the opening hours period is currently + active. For regular opening hours and current + opening hours, this field means whether the + place is open. For secondary opening hours and + current secondary opening hours, this field + means whether the secondary hours of this place + is active. This field is a member of `oneof`_ ``_open_now``. periods (MutableSequence[google.maps.places_v1.types.Place.OpeningHours.Period]): diff --git a/packages/google-maps-places/google/maps/places_v1/types/places_service.py b/packages/google-maps-places/google/maps/places_v1/types/places_service.py index c02ec339b860..476c2fbe06c8 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/places_service.py +++ b/packages/google-maps-places/google/maps/places_v1/types/places_service.py @@ -22,11 +22,17 @@ import proto # type: ignore from google.maps.places_v1.types import contextual_content, ev_charging, geometry +from google.maps.places_v1.types import routing_preference as gmp_routing_preference from google.maps.places_v1.types import place as gmp_place +from google.maps.places_v1.types import polyline as gmp_polyline +from google.maps.places_v1.types import route_modifiers as gmp_route_modifiers +from google.maps.places_v1.types import routing_summary +from google.maps.places_v1.types import travel_mode as gmp_travel_mode __protobuf__ = proto.module( package="google.maps.places.v1", manifest={ + "RoutingParameters", "SearchNearbyRequest", "SearchNearbyResponse", "SearchTextRequest", @@ -40,6 +46,50 @@ ) +class RoutingParameters(proto.Message): + r"""Parameters to configure the routing calculations to the + places in the response, both along a route (where result ranking + will be influenced) and for calculating travel times on results. + + Attributes: + origin (google.type.latlng_pb2.LatLng): + Optional. An explicit routing origin that + overrides the origin defined in the polyline. By + default, the polyline origin is used. + travel_mode (google.maps.places_v1.types.TravelMode): + Optional. The travel mode. + route_modifiers (google.maps.places_v1.types.RouteModifiers): + Optional. The route modifiers. + routing_preference (google.maps.places_v1.types.RoutingPreference): + Optional. Specifies how to compute the routing summaries. + The server attempts to use the selected routing preference + to compute the route. The traffic aware routing preference + is only available for the ``DRIVE`` or ``TWO_WHEELER`` + ``travelMode``. 
+ """ + + origin: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=1, + message=latlng_pb2.LatLng, + ) + travel_mode: gmp_travel_mode.TravelMode = proto.Field( + proto.ENUM, + number=2, + enum=gmp_travel_mode.TravelMode, + ) + route_modifiers: gmp_route_modifiers.RouteModifiers = proto.Field( + proto.MESSAGE, + number=3, + message=gmp_route_modifiers.RouteModifiers, + ) + routing_preference: gmp_routing_preference.RoutingPreference = proto.Field( + proto.ENUM, + number=4, + enum=gmp_routing_preference.RoutingPreference, + ) + + class SearchNearbyRequest(proto.Message): r"""Request proto for Search Nearby. @@ -163,6 +213,9 @@ class SearchNearbyRequest(proto.Message): Required. The region to search. rank_preference (google.maps.places_v1.types.SearchNearbyRequest.RankPreference): How results will be ranked in the response. + routing_parameters (google.maps.places_v1.types.RoutingParameters): + Optional. Parameters that affect the routing + to the search results. """ class RankPreference(proto.Enum): @@ -238,6 +291,11 @@ class LocationRestriction(proto.Message): number=9, enum=RankPreference, ) + routing_parameters: "RoutingParameters" = proto.Field( + proto.MESSAGE, + number=10, + message="RoutingParameters", + ) class SearchNearbyResponse(proto.Message): @@ -248,6 +306,14 @@ class SearchNearbyResponse(proto.Message): A list of places that meets user's requirements like places types, number of places and specific location restriction. + routing_summaries (MutableSequence[google.maps.places_v1.types.RoutingSummary]): + A list of routing summaries where each entry + associates to the corresponding place in the + same index in the places field. If the routing + summary is not available for one of the places, + it will contain an empty entry. This list should + have as many entries as the list of places if + requested. """ places: MutableSequence[gmp_place.Place] = proto.RepeatedField( @@ -255,6 +321,13 @@ class SearchNearbyResponse(proto.Message): number=1, message=gmp_place.Place, ) + routing_summaries: MutableSequence[ + routing_summary.RoutingSummary + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=routing_summary.RoutingSummary, + ) class SearchTextRequest(proto.Message): @@ -327,6 +400,12 @@ class SearchTextRequest(proto.Message): ev_options (google.maps.places_v1.types.SearchTextRequest.EVOptions): Optional. Set the searchable EV options of a place search request. + routing_parameters (google.maps.places_v1.types.RoutingParameters): + Optional. Additional parameters for routing + to results. + search_along_route_parameters (google.maps.places_v1.types.SearchTextRequest.SearchAlongRouteParameters): + Optional. Additional parameters proto for + searching along a route. """ class RankPreference(proto.Enum): @@ -445,6 +524,35 @@ class EVOptions(proto.Message): enum=ev_charging.EVConnectorType, ) + class SearchAlongRouteParameters(proto.Message): + r"""Specifies a precalculated polyline from the `Routes + API `__ + defining the route to search. Searching along a route is similar to + using the ``locationBias`` or ``locationRestriction`` request option + to bias the search results. However, while the ``locationBias`` and + ``locationRestriction`` options let you specify a region to bias the + search results, this option lets you bias the results along a trip + route. 
+ + Results are not guaranteed to be along the route provided, but + rather are ranked within the search area defined by the polyline + and, optionally, by the ``locationBias`` or ``locationRestriction`` + based on minimal detour times from origin to destination. The + results might be along an alternate route, especially if the + provided polyline does not define an optimal route from origin to + destination. + + Attributes: + polyline (google.maps.places_v1.types.Polyline): + Required. The route polyline. + """ + + polyline: gmp_polyline.Polyline = proto.Field( + proto.MESSAGE, + number=1, + message=gmp_polyline.Polyline, + ) + text_query: str = proto.Field( proto.STRING, number=1, @@ -502,6 +610,16 @@ class EVOptions(proto.Message): number=15, message=EVOptions, ) + routing_parameters: "RoutingParameters" = proto.Field( + proto.MESSAGE, + number=16, + message="RoutingParameters", + ) + search_along_route_parameters: SearchAlongRouteParameters = proto.Field( + proto.MESSAGE, + number=17, + message=SearchAlongRouteParameters, + ) class SearchTextResponse(proto.Message): @@ -511,6 +629,14 @@ class SearchTextResponse(proto.Message): places (MutableSequence[google.maps.places_v1.types.Place]): A list of places that meet the user's text search criteria. + routing_summaries (MutableSequence[google.maps.places_v1.types.RoutingSummary]): + A list of routing summaries where each entry + associates to the corresponding place in the + same index in the places field. If the routing + summary is not available for one of the places, + it will contain an empty entry. This list will + have as many entries as the list of places if + requested. contextual_contents (MutableSequence[google.maps.places_v1.types.ContextualContent]): Experimental: See https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative @@ -522,7 +648,7 @@ class SearchTextResponse(proto.Message): in the request are preferred. If the contextual content is not available for one of the places, it will return non-contextual content. It will be empty only when the - content is unavailable for this place. This list should have + content is unavailable for this place. This list will have as many entries as the list of places if requested. """ @@ -531,6 +657,13 @@ class SearchTextResponse(proto.Message): number=1, message=gmp_place.Place, ) + routing_summaries: MutableSequence[ + routing_summary.RoutingSummary + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=routing_summary.RoutingSummary, + ) contextual_contents: MutableSequence[ contextual_content.ContextualContent ] = proto.RepeatedField( diff --git a/packages/google-maps-places/google/maps/places_v1/types/polyline.py b/packages/google-maps-places/google/maps/places_v1/types/polyline.py new file mode 100644 index 000000000000..c5f14fdb5642 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/polyline.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "Polyline", + }, +) + + +class Polyline(proto.Message): + r"""A route polyline. Only supports an `encoded + polyline `__, + which can be passed as a string and includes compression with + minimal lossiness. This is the Routes API default output. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + encoded_polyline (str): + An `encoded + polyline `__, + as returned by the `Routes API by + default `__. + See the + `encoder `__ + and + `decoder `__ + tools. + + This field is a member of `oneof`_ ``polyline_type``. + """ + + encoded_polyline: str = proto.Field( + proto.STRING, + number=1, + oneof="polyline_type", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py b/packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py new file mode 100644 index 000000000000..59e720aa7696 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/route_modifiers.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "RouteModifiers", + }, +) + + +class RouteModifiers(proto.Message): + r"""Encapsulates a set of optional conditions to satisfy when + calculating the routes. + + Attributes: + avoid_tolls (bool): + Optional. When set to true, avoids toll roads where + reasonable, giving preference to routes not containing toll + roads. Applies only to the ``DRIVE`` and ``TWO_WHEELER`` + [``TravelMode``][google.maps.places.v1.TravelMode]. + avoid_highways (bool): + Optional. When set to true, avoids highways where + reasonable, giving preference to routes not containing + highways. Applies only to the ``DRIVE`` and ``TWO_WHEELER`` + [``TravelMode``][google.maps.places.v1.TravelMode]. + avoid_ferries (bool): + Optional. When set to true, avoids ferries where reasonable, + giving preference to routes not containing ferries. Applies + only to the ``DRIVE`` and ``TWO_WHEELER`` + [``TravelMode``][google.maps.places.v1.TravelMode]. + avoid_indoor (bool): + Optional. When set to true, avoids navigating indoors where + reasonable, giving preference to routes not containing + indoor navigation. Applies only to the ``WALK`` + [``TravelMode``][google.maps.places.v1.TravelMode]. 
+ """ + + avoid_tolls: bool = proto.Field( + proto.BOOL, + number=1, + ) + avoid_highways: bool = proto.Field( + proto.BOOL, + number=2, + ) + avoid_ferries: bool = proto.Field( + proto.BOOL, + number=3, + ) + avoid_indoor: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/routing_preference.py b/packages/google-maps-places/google/maps/places_v1/types/routing_preference.py new file mode 100644 index 000000000000..786d01140a2f --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/routing_preference.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "RoutingPreference", + }, +) + + +class RoutingPreference(proto.Enum): + r"""A set of values that specify factors to take into + consideration when calculating the route. + + Values: + ROUTING_PREFERENCE_UNSPECIFIED (0): + No routing preference specified. Default to + ``TRAFFIC_UNAWARE``. + TRAFFIC_UNAWARE (1): + Computes routes without taking live traffic conditions into + consideration. Suitable when traffic conditions don't matter + or are not applicable. Using this value produces the lowest + latency. Note: For + [``TravelMode``][google.maps.places.v1.TravelMode] ``DRIVE`` + and ``TWO_WHEELER``, the route and duration chosen are based + on road network and average time-independent traffic + conditions, not current road conditions. Consequently, + routes may include roads that are temporarily closed. + Results for a given request may vary over time due to + changes in the road network, updated average traffic + conditions, and the distributed nature of the service. + Results may also vary between nearly-equivalent routes at + any time or frequency. + TRAFFIC_AWARE (2): + Calculates routes taking live traffic conditions into + consideration. In contrast to ``TRAFFIC_AWARE_OPTIMAL``, + some optimizations are applied to significantly reduce + latency. + TRAFFIC_AWARE_OPTIMAL (3): + Calculates the routes taking live traffic + conditions into consideration, without applying + most performance optimizations. Using this value + produces the highest latency. 
+ """ + ROUTING_PREFERENCE_UNSPECIFIED = 0 + TRAFFIC_UNAWARE = 1 + TRAFFIC_AWARE = 2 + TRAFFIC_AWARE_OPTIMAL = 3 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/routing_summary.py b/packages/google-maps-places/google/maps/places_v1/types/routing_summary.py new file mode 100644 index 000000000000..7575fe18ed90 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/routing_summary.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "RoutingSummary", + }, +) + + +class RoutingSummary(proto.Message): + r"""The duration and distance from the routing origin to a place in the + response, and a second leg from that place to the destination, if + requested. Note: Adding ``routingSummaries`` in the field mask + without also including either the ``routingParameters.origin`` + parameter or the + ``searchAlongRouteParameters.polyline.encodedPolyline`` parameter in + the request causes an error. + + Attributes: + legs (MutableSequence[google.maps.places_v1.types.RoutingSummary.Leg]): + The legs of the trip. + + When you calculate travel duration and distance from a set + origin, ``legs`` contains a single leg containing the + duration and distance from the origin to the destination. + When you do a search along route, ``legs`` contains two + legs: one from the origin to place, and one from the place + to the destination. + """ + + class Leg(proto.Message): + r"""A leg is a single portion of a journey from one location to + another. + + Attributes: + duration (google.protobuf.duration_pb2.Duration): + The time it takes to complete this leg of the + trip. + distance_meters (int): + The distance of this leg of the trip. + """ + + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + distance_meters: int = proto.Field( + proto.INT32, + number=2, + ) + + legs: MutableSequence[Leg] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Leg, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/travel_mode.py b/packages/google-maps-places/google/maps/places_v1/types/travel_mode.py new file mode 100644 index 000000000000..bcb42ef66089 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/travel_mode.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "TravelMode", + }, +) + + +class TravelMode(proto.Enum): + r"""Travel mode options. These options map to what `Routes API + offers `__. + + Values: + TRAVEL_MODE_UNSPECIFIED (0): + No travel mode specified. Defaults to ``DRIVE``. + DRIVE (1): + Travel by passenger car. + BICYCLE (2): + Travel by bicycle. Not supported with + ``search_along_route_parameters``. + WALK (3): + Travel by walking. Not supported with + ``search_along_route_parameters``. + TWO_WHEELER (4): + Motorized two wheeled vehicles of all kinds such as scooters + and motorcycles. Note that this is distinct from the + ``BICYCLE`` travel mode which covers human-powered + transport. Not supported with + ``search_along_route_parameters``. Only supported in those + countries listed at `Countries and regions supported for + two-wheeled + vehicles `__. + """ + TRAVEL_MODE_UNSPECIFIED = 0 + DRIVE = 1 + BICYCLE = 2 + WALK = 3 + TWO_WHEELER = 4 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/scripts/fixup_places_v1_keywords.py b/packages/google-maps-places/scripts/fixup_places_v1_keywords.py index 057be614cc45..886ede01a438 100644 --- a/packages/google-maps-places/scripts/fixup_places_v1_keywords.py +++ b/packages/google-maps-places/scripts/fixup_places_v1_keywords.py @@ -42,8 +42,8 @@ class placesCallTransformer(cst.CSTTransformer): 'autocomplete_places': ('input', 'location_bias', 'location_restriction', 'included_primary_types', 'included_region_codes', 'language_code', 'region_code', 'origin', 'input_offset', 'include_query_predictions', 'session_token', ), 'get_photo_media': ('name', 'max_width_px', 'max_height_px', 'skip_http_redirect', ), 'get_place': ('name', 'language_code', 'region_code', 'session_token', ), - 'search_nearby': ('location_restriction', 'language_code', 'region_code', 'included_types', 'excluded_types', 'included_primary_types', 'excluded_primary_types', 'max_result_count', 'rank_preference', ), - 'search_text': ('text_query', 'language_code', 'region_code', 'rank_preference', 'included_type', 'open_now', 'min_rating', 'max_result_count', 'price_levels', 'strict_type_filtering', 'location_bias', 'location_restriction', 'ev_options', ), + 'search_nearby': ('location_restriction', 'language_code', 'region_code', 'included_types', 'excluded_types', 'included_primary_types', 'excluded_primary_types', 'max_result_count', 'rank_preference', 'routing_parameters', ), + 'search_text': ('text_query', 'language_code', 'region_code', 'rank_preference', 'included_type', 'open_now', 'min_rating', 'max_result_count', 'price_levels', 'strict_type_filtering', 'location_bias', 'location_restriction', 'ev_options', 'routing_parameters', 'search_along_route_parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py 
b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py
index 0dbb9efbb41f..c7fa3af8519d 100644
--- a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py
+++ b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py
@@ -59,7 +59,12 @@
     photo,
     place,
     places_service,
+    polyline,
     review,
+    route_modifiers,
+    routing_preference,
+    routing_summary,
+    travel_mode,
 )


From afcf7cbe57d6e0f183a113ba03bba9c288052969 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 20 Sep 2024 11:41:04 -0400
Subject: [PATCH 078/108] feat: [google-cloud-dlp] action for publishing data profiles to SecOps (formerly known as Chronicle) (#13094)

BEGIN_COMMIT_OVERRIDE
feat: action for publishing data profiles to SecOps (formerly known as Chronicle)
feat: action for publishing data profiles to Security Command Center
feat: discovery configs for AWS S3 buckets
docs: small improvements and clarifications
END_COMMIT_OVERRIDE

- [ ] Regenerate this pull request now.

feat: action for publishing data profiles to Security Command Center
feat: discovery configs for AWS S3 buckets
docs: small improvements and clarifications

PiperOrigin-RevId: 676184918

Source-Link: https://github.com/googleapis/googleapis/commit/607c2ae82620153880a66898bcbf0a46d38d7d10
Source-Link: https://github.com/googleapis/googleapis-gen/commit/f92f1ae19895930aca6ad11aaeb1d4d9cd7a61a1
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRscC8uT3dsQm90LnlhbWwiLCJoIjoiZjkyZjFhZTE5ODk1OTMwYWNhNmFkMTFhYWViMWQ0ZDljZDdhNjFhMSJ9

---------

Co-authored-by: Owl Bot
Co-authored-by: ohmayr
---
 .../google/cloud/dlp/__init__.py              | 28 +
 .../google/cloud/dlp/gapic_version.py         |  2 +-
 .../google/cloud/dlp_v2/__init__.py           | 28 +
 .../google/cloud/dlp_v2/gapic_version.py      |  2 +-
 .../services/dlp_service/async_client.py      |  1 +
 .../dlp_v2/services/dlp_service/client.py     |  1 +
 .../services/dlp_service/transports/rest.py   |  1 +
 .../google/cloud/dlp_v2/types/__init__.py     | 28 +
 .../google/cloud/dlp_v2/types/dlp.py          | 563 +++++++++++++++++-
 ...nippet_metadata_google.privacy.dlp.v2.json |  2 +-
 10 files changed, 648 insertions(+), 8 deletions(-)

diff --git a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py
index 4a4594889a9c..b77f85d4bdc6 100644
--- a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py
+++ b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py
@@ -26,7 +26,12 @@
     ActivateJobTriggerRequest,
     AllOtherDatabaseResources,
     AllOtherResources,
+    AmazonS3Bucket,
+    AmazonS3BucketConditions,
+    AmazonS3BucketRegex,
     AnalyzeDataSourceRiskDetails,
+    AwsAccount,
+    AwsAccountRegex,
     BigQueryDiscoveryTarget,
     BigQueryRegex,
     BigQueryRegexes,
@@ -111,6 +116,9 @@
     DiscoveryFileStoreConditions,
     DiscoveryGenerationCadence,
     DiscoveryInspectTemplateModifiedCadence,
+    DiscoveryOtherCloudConditions,
+    DiscoveryOtherCloudFilter,
+    DiscoveryOtherCloudGenerationCadence,
     DiscoverySchemaModifiedCadence,
     DiscoveryStartingLocation,
     DiscoveryTableModifiedCadence,
@@ -203,6 +211,12 @@
     MetadataLocation,
     MetadataType,
     NullPercentageLevel,
+    OtherCloudDiscoveryStartingLocation,
+    OtherCloudDiscoveryTarget,
+    OtherCloudResourceCollection,
+    OtherCloudResourceRegex,
+    OtherCloudResourceRegexes,
+    OtherCloudSingleResourceReference,
     OtherInfoTypeSummary,
     OutputStorageConfig,
     PrimitiveTransformation,
@@ -307,7 +321,12 @@
     "ActivateJobTriggerRequest",
     "AllOtherDatabaseResources",
    "AllOtherResources",
+    "AmazonS3Bucket",
+    "AmazonS3BucketConditions",
+
"AmazonS3BucketRegex", "AnalyzeDataSourceRiskDetails", + "AwsAccount", + "AwsAccountRegex", "BigQueryDiscoveryTarget", "BigQueryRegex", "BigQueryRegexes", @@ -385,6 +404,9 @@ "DiscoveryFileStoreConditions", "DiscoveryGenerationCadence", "DiscoveryInspectTemplateModifiedCadence", + "DiscoveryOtherCloudConditions", + "DiscoveryOtherCloudFilter", + "DiscoveryOtherCloudGenerationCadence", "DiscoverySchemaModifiedCadence", "DiscoveryStartingLocation", "DiscoveryTableModifiedCadence", @@ -471,6 +493,12 @@ "Location", "Manual", "MetadataLocation", + "OtherCloudDiscoveryStartingLocation", + "OtherCloudDiscoveryTarget", + "OtherCloudResourceCollection", + "OtherCloudResourceRegex", + "OtherCloudResourceRegexes", + "OtherCloudSingleResourceReference", "OtherInfoTypeSummary", "OutputStorageConfig", "PrimitiveTransformation", diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py index 3ed830e26f63..558c8aab67c5 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py index 80b0be2c935b..4d47e1ac68f6 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py @@ -25,7 +25,12 @@ ActivateJobTriggerRequest, AllOtherDatabaseResources, AllOtherResources, + AmazonS3Bucket, + AmazonS3BucketConditions, + AmazonS3BucketRegex, AnalyzeDataSourceRiskDetails, + AwsAccount, + AwsAccountRegex, BigQueryDiscoveryTarget, BigQueryRegex, BigQueryRegexes, @@ -110,6 +115,9 @@ DiscoveryFileStoreConditions, DiscoveryGenerationCadence, DiscoveryInspectTemplateModifiedCadence, + DiscoveryOtherCloudConditions, + DiscoveryOtherCloudFilter, + DiscoveryOtherCloudGenerationCadence, DiscoverySchemaModifiedCadence, DiscoveryStartingLocation, DiscoveryTableModifiedCadence, @@ -202,6 +210,12 @@ MetadataLocation, MetadataType, NullPercentageLevel, + OtherCloudDiscoveryStartingLocation, + OtherCloudDiscoveryTarget, + OtherCloudResourceCollection, + OtherCloudResourceRegex, + OtherCloudResourceRegexes, + OtherCloudSingleResourceReference, OtherInfoTypeSummary, OutputStorageConfig, PrimitiveTransformation, @@ -305,7 +319,12 @@ "ActivateJobTriggerRequest", "AllOtherDatabaseResources", "AllOtherResources", + "AmazonS3Bucket", + "AmazonS3BucketConditions", + "AmazonS3BucketRegex", "AnalyzeDataSourceRiskDetails", + "AwsAccount", + "AwsAccountRegex", "BigQueryDiscoveryTarget", "BigQueryField", "BigQueryKey", @@ -401,6 +420,9 @@ "DiscoveryFileStoreConditions", "DiscoveryGenerationCadence", "DiscoveryInspectTemplateModifiedCadence", + "DiscoveryOtherCloudConditions", + "DiscoveryOtherCloudFilter", + "DiscoveryOtherCloudGenerationCadence", "DiscoverySchemaModifiedCadence", "DiscoveryStartingLocation", "DiscoveryTableModifiedCadence", @@ -502,6 +524,12 @@ "MetadataLocation", "MetadataType", "NullPercentageLevel", + "OtherCloudDiscoveryStartingLocation", + "OtherCloudDiscoveryTarget", + "OtherCloudResourceCollection", + "OtherCloudResourceRegex", + "OtherCloudResourceRegexes", + "OtherCloudSingleResourceReference", "OtherInfoTypeSummary", "OutputStorageConfig", 
"PartitionId", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py index 3ed830e26f63..558c8aab67c5 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py index af7fdb0a0cee..2f384158ce0c 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -5352,6 +5352,7 @@ async def sample_get_file_store_data_profile(): The profile for a file store. - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py index 198f49f1e8e8..7163280e5c62 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py @@ -5877,6 +5877,7 @@ def sample_get_file_store_data_profile(): The profile for a file store. - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py index ed604aeb0b85..df56876a3b22 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -4046,6 +4046,7 @@ def __call__( The profile for a file store. - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. 
""" diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py index b660eb26c59b..aef58e903c8e 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py @@ -19,7 +19,12 @@ ActivateJobTriggerRequest, AllOtherDatabaseResources, AllOtherResources, + AmazonS3Bucket, + AmazonS3BucketConditions, + AmazonS3BucketRegex, AnalyzeDataSourceRiskDetails, + AwsAccount, + AwsAccountRegex, BigQueryDiscoveryTarget, BigQueryRegex, BigQueryRegexes, @@ -104,6 +109,9 @@ DiscoveryFileStoreConditions, DiscoveryGenerationCadence, DiscoveryInspectTemplateModifiedCadence, + DiscoveryOtherCloudConditions, + DiscoveryOtherCloudFilter, + DiscoveryOtherCloudGenerationCadence, DiscoverySchemaModifiedCadence, DiscoveryStartingLocation, DiscoveryTableModifiedCadence, @@ -196,6 +204,12 @@ MetadataLocation, MetadataType, NullPercentageLevel, + OtherCloudDiscoveryStartingLocation, + OtherCloudDiscoveryTarget, + OtherCloudResourceCollection, + OtherCloudResourceRegex, + OtherCloudResourceRegexes, + OtherCloudSingleResourceReference, OtherInfoTypeSummary, OutputStorageConfig, PrimitiveTransformation, @@ -298,7 +312,12 @@ "ActivateJobTriggerRequest", "AllOtherDatabaseResources", "AllOtherResources", + "AmazonS3Bucket", + "AmazonS3BucketConditions", + "AmazonS3BucketRegex", "AnalyzeDataSourceRiskDetails", + "AwsAccount", + "AwsAccountRegex", "BigQueryDiscoveryTarget", "BigQueryRegex", "BigQueryRegexes", @@ -376,6 +395,9 @@ "DiscoveryFileStoreConditions", "DiscoveryGenerationCadence", "DiscoveryInspectTemplateModifiedCadence", + "DiscoveryOtherCloudConditions", + "DiscoveryOtherCloudFilter", + "DiscoveryOtherCloudGenerationCadence", "DiscoverySchemaModifiedCadence", "DiscoveryStartingLocation", "DiscoveryTableModifiedCadence", @@ -462,6 +484,12 @@ "Location", "Manual", "MetadataLocation", + "OtherCloudDiscoveryStartingLocation", + "OtherCloudDiscoveryTarget", + "OtherCloudResourceCollection", + "OtherCloudResourceRegex", + "OtherCloudResourceRegexes", + "OtherCloudSingleResourceReference", "OtherInfoTypeSummary", "OutputStorageConfig", "PrimitiveTransformation", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py index a92c0cfd6de3..e5d778fcae6d 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py @@ -206,7 +206,21 @@ "DiscoveryCloudStorageGenerationCadence", "DiscoveryCloudStorageConditions", "DiscoveryFileStoreConditions", + "OtherCloudDiscoveryTarget", + "DiscoveryOtherCloudFilter", + "OtherCloudResourceCollection", + "OtherCloudResourceRegexes", + "OtherCloudResourceRegex", + "AwsAccountRegex", + "AmazonS3BucketRegex", + "OtherCloudSingleResourceReference", + "AwsAccount", + "AmazonS3Bucket", + "DiscoveryOtherCloudConditions", + "AmazonS3BucketConditions", + "DiscoveryOtherCloudGenerationCadence", "DiscoveryStartingLocation", + "OtherCloudDiscoveryStartingLocation", "AllOtherResources", "DlpJob", "GetDlpJobRequest", @@ -7827,6 +7841,18 @@ class DataProfileAction(proto.Message): pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): Publish a message into the Pub/Sub topic. + This field is a member of `oneof`_ ``action``. 
+ publish_to_chronicle (google.cloud.dlp_v2.types.DataProfileAction.PublishToChronicle): + Publishes generated data profiles to Google Security + Operations. For more information, see `Use Sensitive Data + Protection data in context-aware + analytics `__. + + This field is a member of `oneof`_ ``action``. + publish_to_scc (google.cloud.dlp_v2.types.DataProfileAction.PublishToSecurityCommandCenter): + Publishes findings to SCC for each data + profile. + This field is a member of `oneof`_ ``action``. tag_resources (google.cloud.dlp_v2.types.DataProfileAction.TagResources): Tags the profiled resources with the @@ -7947,6 +7973,18 @@ class DetailLevel(proto.Enum): ) ) + class PublishToChronicle(proto.Message): + r"""Message expressing intention to publish to Google Security + Operations. + + """ + + class PublishToSecurityCommandCenter(proto.Message): + r"""If set, a summary finding will be created/updated in SCC for + each profile. + + """ + class TagResources(proto.Message): r"""If set, attaches the [tags] (https://cloud.google.com/resource-manager/docs/tags/tags-overview) @@ -8062,6 +8100,18 @@ class TagValue(proto.Message): oneof="action", message=PubSubNotification, ) + publish_to_chronicle: PublishToChronicle = proto.Field( + proto.MESSAGE, + number=3, + oneof="action", + message=PublishToChronicle, + ) + publish_to_scc: PublishToSecurityCommandCenter = proto.Field( + proto.MESSAGE, + number=4, + oneof="action", + message=PublishToSecurityCommandCenter, + ) tag_resources: TagResources = proto.Field( proto.MESSAGE, number=8, @@ -8087,6 +8137,8 @@ class DataProfileJobConfig(proto.Message): service account that exists within this project must have access to all resources that are profiled, and the Cloud DLP API must be enabled. + other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): + Must be set only when scanning other clouds. inspect_templates (MutableSequence[str]): Detection logic for profile generation. @@ -8119,6 +8171,11 @@ class DataProfileJobConfig(proto.Message): proto.STRING, number=5, ) + other_cloud_starting_location: "OtherCloudDiscoveryStartingLocation" = proto.Field( + proto.MESSAGE, + number=8, + message="OtherCloudDiscoveryStartingLocation", + ) inspect_templates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=7, @@ -8251,6 +8308,8 @@ class DiscoveryConfig(proto.Message): Display name (max 100 chars) org_config (google.cloud.dlp_v2.types.DiscoveryConfig.OrgConfig): Only set when the parent is an org. + other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): + Must be set only when scanning other clouds. inspect_templates (MutableSequence[str]): Detection logic for profile generation. @@ -8348,6 +8407,11 @@ class OrgConfig(proto.Message): number=2, message=OrgConfig, ) + other_cloud_starting_location: "OtherCloudDiscoveryStartingLocation" = proto.Field( + proto.MESSAGE, + number=12, + message="OtherCloudDiscoveryStartingLocation", + ) inspect_templates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, @@ -8422,6 +8486,12 @@ class DiscoveryTarget(proto.Message): Cloud Storage target for Discovery. The first target to match a table will be the one applied. + This field is a member of `oneof`_ ``target``. + other_cloud_target (google.cloud.dlp_v2.types.OtherCloudDiscoveryTarget): + Other clouds target for discovery. The first + target to match a resource will be the one + applied. + This field is a member of `oneof`_ ``target``. 
""" @@ -8449,6 +8519,12 @@ class DiscoveryTarget(proto.Message): oneof="target", message="CloudStorageDiscoveryTarget", ) + other_cloud_target: "OtherCloudDiscoveryTarget" = proto.Field( + proto.MESSAGE, + number=5, + oneof="target", + message="OtherCloudDiscoveryTarget", + ) class BigQueryDiscoveryTarget(proto.Message): @@ -9592,6 +9668,414 @@ class DiscoveryFileStoreConditions(proto.Message): ) +class OtherCloudDiscoveryTarget(proto.Message): + r"""Target used to match against for discovery of resources from other + clouds. An `AWS connector in Security Command Center + (Enterprise `__ + is required to use this feature. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_source_type (google.cloud.dlp_v2.types.DataSourceType): + Required. The type of data profiles generated by this + discovery target. Supported values are: + + - aws/s3/bucket + filter (google.cloud.dlp_v2.types.DiscoveryOtherCloudFilter): + Required. The resources that the discovery + cadence applies to. The first target with a + matching filter will be the one to apply to a + resource. + conditions (google.cloud.dlp_v2.types.DiscoveryOtherCloudConditions): + Optional. In addition to matching the filter, + these conditions must be true before a profile + is generated. + generation_cadence (google.cloud.dlp_v2.types.DiscoveryOtherCloudGenerationCadence): + How often and when to update data profiles. + New resources that match both the filter and + conditions are scanned as quickly as possible + depending on system capacity. + + This field is a member of `oneof`_ ``cadence``. + disabled (google.cloud.dlp_v2.types.Disabled): + Disable profiling for resources that match + this filter. + + This field is a member of `oneof`_ ``cadence``. + """ + + data_source_type: "DataSourceType" = proto.Field( + proto.MESSAGE, + number=1, + message="DataSourceType", + ) + filter: "DiscoveryOtherCloudFilter" = proto.Field( + proto.MESSAGE, + number=2, + message="DiscoveryOtherCloudFilter", + ) + conditions: "DiscoveryOtherCloudConditions" = proto.Field( + proto.MESSAGE, + number=3, + message="DiscoveryOtherCloudConditions", + ) + generation_cadence: "DiscoveryOtherCloudGenerationCadence" = proto.Field( + proto.MESSAGE, + number=4, + oneof="cadence", + message="DiscoveryOtherCloudGenerationCadence", + ) + disabled: "Disabled" = proto.Field( + proto.MESSAGE, + number=5, + oneof="cadence", + message="Disabled", + ) + + +class DiscoveryOtherCloudFilter(proto.Message): + r"""Determines which resources from the other cloud will have + profiles generated. Includes the ability to filter by resource + names. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + collection (google.cloud.dlp_v2.types.OtherCloudResourceCollection): + A collection of resources for this filter to + apply to. + + This field is a member of `oneof`_ ``filter``. + single_resource (google.cloud.dlp_v2.types.OtherCloudSingleResourceReference): + The resource to scan. 
Configs using this + filter can only have one target (the target with + this single resource reference). + + This field is a member of `oneof`_ ``filter``. + others (google.cloud.dlp_v2.types.AllOtherResources): + Optional. Catch-all. This should always be + the last target in the list because anything + above it will apply first. Should only appear + once in a configuration. If none is specified, a + default one will be added automatically. + + This field is a member of `oneof`_ ``filter``. + """ + + collection: "OtherCloudResourceCollection" = proto.Field( + proto.MESSAGE, + number=1, + oneof="filter", + message="OtherCloudResourceCollection", + ) + single_resource: "OtherCloudSingleResourceReference" = proto.Field( + proto.MESSAGE, + number=2, + oneof="filter", + message="OtherCloudSingleResourceReference", + ) + others: "AllOtherResources" = proto.Field( + proto.MESSAGE, + number=100, + oneof="filter", + message="AllOtherResources", + ) + + +class OtherCloudResourceCollection(proto.Message): + r"""Match resources using regex filters. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_regexes (google.cloud.dlp_v2.types.OtherCloudResourceRegexes): + A collection of regular expressions to match + a resource against. + + This field is a member of `oneof`_ ``pattern``. + """ + + include_regexes: "OtherCloudResourceRegexes" = proto.Field( + proto.MESSAGE, + number=1, + oneof="pattern", + message="OtherCloudResourceRegexes", + ) + + +class OtherCloudResourceRegexes(proto.Message): + r"""A collection of regular expressions to determine what + resources to match against. + + Attributes: + patterns (MutableSequence[google.cloud.dlp_v2.types.OtherCloudResourceRegex]): + A group of regular expression patterns to + match against one or more resources. + Maximum of 100 entries. The sum of all regular + expression's length can't exceed 10 KiB. + """ + + patterns: MutableSequence["OtherCloudResourceRegex"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OtherCloudResourceRegex", + ) + + +class OtherCloudResourceRegex(proto.Message): + r"""A pattern to match against one or more resources. At least one + pattern must be specified. Regular expressions use RE2 + `syntax `__; a guide can + be found under the google/re2 repository on GitHub. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + amazon_s3_bucket_regex (google.cloud.dlp_v2.types.AmazonS3BucketRegex): + Regex for Amazon S3 buckets. + + This field is a member of `oneof`_ ``resource_regex``. + """ + + amazon_s3_bucket_regex: "AmazonS3BucketRegex" = proto.Field( + proto.MESSAGE, + number=1, + oneof="resource_regex", + message="AmazonS3BucketRegex", + ) + + +class AwsAccountRegex(proto.Message): + r"""AWS account regex. + + Attributes: + account_id_regex (str): + Optional. Regex to test the AWS account ID + against. If empty, all accounts match. + """ + + account_id_regex: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AmazonS3BucketRegex(proto.Message): + r"""Amazon S3 bucket regex. + + Attributes: + aws_account_regex (google.cloud.dlp_v2.types.AwsAccountRegex): + The AWS account regex. + bucket_name_regex (str): + Optional. Regex to test the bucket name + against. If empty, all buckets match. 
+ """ + + aws_account_regex: "AwsAccountRegex" = proto.Field( + proto.MESSAGE, + number=1, + message="AwsAccountRegex", + ) + bucket_name_regex: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OtherCloudSingleResourceReference(proto.Message): + r"""Identifies a single resource, like a single Amazon S3 bucket. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + amazon_s3_bucket (google.cloud.dlp_v2.types.AmazonS3Bucket): + Amazon S3 bucket. + + This field is a member of `oneof`_ ``resource``. + """ + + amazon_s3_bucket: "AmazonS3Bucket" = proto.Field( + proto.MESSAGE, + number=1, + oneof="resource", + message="AmazonS3Bucket", + ) + + +class AwsAccount(proto.Message): + r"""AWS account. + + Attributes: + account_id (str): + Required. AWS account ID. + """ + + account_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AmazonS3Bucket(proto.Message): + r"""Amazon S3 bucket. + + Attributes: + aws_account (google.cloud.dlp_v2.types.AwsAccount): + The AWS account. + bucket_name (str): + Required. The bucket name. + """ + + aws_account: "AwsAccount" = proto.Field( + proto.MESSAGE, + number=1, + message="AwsAccount", + ) + bucket_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DiscoveryOtherCloudConditions(proto.Message): + r"""Requirements that must be true before a resource is profiled + for the first time. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_age (google.protobuf.duration_pb2.Duration): + Minimum age a resource must be before Cloud + DLP can profile it. Value must be 1 hour or + greater. + amazon_s3_bucket_conditions (google.cloud.dlp_v2.types.AmazonS3BucketConditions): + Amazon S3 bucket conditions. + + This field is a member of `oneof`_ ``conditions``. + """ + + min_age: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + amazon_s3_bucket_conditions: "AmazonS3BucketConditions" = proto.Field( + proto.MESSAGE, + number=2, + oneof="conditions", + message="AmazonS3BucketConditions", + ) + + +class AmazonS3BucketConditions(proto.Message): + r"""Amazon S3 bucket conditions. + + Attributes: + bucket_types (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.BucketType]): + Optional. Bucket types that should be profiled. Optional. + Defaults to TYPE_ALL_SUPPORTED if unspecified. + object_storage_classes (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.ObjectStorageClass]): + Optional. Object classes that should be profiled. Optional. + Defaults to ALL_SUPPORTED_CLASSES if unspecified. + """ + + class BucketType(proto.Enum): + r"""Supported Amazon S3 bucket types. Defaults to TYPE_ALL_SUPPORTED. + + Values: + TYPE_UNSPECIFIED (0): + Unused. + TYPE_ALL_SUPPORTED (1): + All supported classes. + TYPE_GENERAL_PURPOSE (2): + A general purpose Amazon S3 bucket. + """ + TYPE_UNSPECIFIED = 0 + TYPE_ALL_SUPPORTED = 1 + TYPE_GENERAL_PURPOSE = 2 + + class ObjectStorageClass(proto.Enum): + r"""Supported Amazon S3 object storage classes. Defaults to + ALL_SUPPORTED_CLASSES. + + Values: + UNSPECIFIED (0): + Unused. + ALL_SUPPORTED_CLASSES (1): + All supported classes. + STANDARD (2): + Standard object class. + STANDARD_INFREQUENT_ACCESS (4): + Standard - infrequent access object class. + GLACIER_INSTANT_RETRIEVAL (6): + Glacier - instant retrieval object class. 
+ INTELLIGENT_TIERING (7): + Objects in the S3 Intelligent-Tiering access + tiers. + """ + UNSPECIFIED = 0 + ALL_SUPPORTED_CLASSES = 1 + STANDARD = 2 + STANDARD_INFREQUENT_ACCESS = 4 + GLACIER_INSTANT_RETRIEVAL = 6 + INTELLIGENT_TIERING = 7 + + bucket_types: MutableSequence[BucketType] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=BucketType, + ) + object_storage_classes: MutableSequence[ObjectStorageClass] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=ObjectStorageClass, + ) + + +class DiscoveryOtherCloudGenerationCadence(proto.Message): + r"""How often existing resources should have their profiles + refreshed. New resources are scanned as quickly as possible + depending on system capacity. + + Attributes: + refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + Optional. Frequency to update profiles + regardless of whether the underlying resource + has changes. Defaults to never. + inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Optional. Governs when to update data profiles when the + inspection rules defined by the ``InspectTemplate`` change. + If not set, changing the template will not cause a data + profile to update. + """ + + refresh_frequency: "DataProfileUpdateFrequency" = proto.Field( + proto.ENUM, + number=1, + enum="DataProfileUpdateFrequency", + ) + inspect_template_modified_cadence: "DiscoveryInspectTemplateModifiedCadence" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="DiscoveryInspectTemplateModifiedCadence", + ) + ) + + class DiscoveryStartingLocation(proto.Message): r"""The location to begin a discovery scan. Denotes an organization ID or folder ID within an organization. @@ -9627,6 +10111,62 @@ class DiscoveryStartingLocation(proto.Message): ) +class OtherCloudDiscoveryStartingLocation(proto.Message): + r"""The other cloud starting location for discovery. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + aws_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation.AwsDiscoveryStartingLocation): + The AWS starting location for discovery. + + This field is a member of `oneof`_ ``location``. + """ + + class AwsDiscoveryStartingLocation(proto.Message): + r"""The AWS starting location for discovery. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + account_id (str): + The AWS account ID that this discovery config applies to. + Within an AWS organization, you can find the AWS account ID + inside an AWS account ARN. Example: + arn:{partition}:organizations::{management_account_id}:account/{org_id}/{account_id} + + This field is a member of `oneof`_ ``scope``. + all_asset_inventory_assets (bool): + All AWS assets stored in Asset Inventory that + didn't match other AWS discovery configs. + + This field is a member of `oneof`_ ``scope``. 
+ """ + + account_id: str = proto.Field( + proto.STRING, + number=2, + oneof="scope", + ) + all_asset_inventory_assets: bool = proto.Field( + proto.BOOL, + number=3, + oneof="scope", + ) + + aws_location: AwsDiscoveryStartingLocation = proto.Field( + proto.MESSAGE, + number=1, + oneof="location", + message=AwsDiscoveryStartingLocation, + ) + + class AllOtherResources(proto.Message): r"""Match discovery resources not covered by any other filter.""" @@ -11260,7 +11800,7 @@ class ProjectDataProfile(proto.Message): name (str): The resource name of the profile. project_id (str): - Project ID that was profiled. + Project ID or account that was profiled. profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): The last time the profile was generated. sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): @@ -11918,6 +12458,7 @@ class FileStoreDataProfile(proto.Message): r"""The profile for a file store. - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. Attributes: name (str): @@ -11929,12 +12470,15 @@ class FileStoreDataProfile(proto.Message): for this file store. project_id (str): The Google Cloud project ID that owns the - resource. + resource. For Amazon S3 buckets, this is the AWS + Account Id. file_store_location (str): The location of the file store. - Cloud Storage: https://cloud.google.com/storage/docs/locations#available-locations + - Amazon S3: + https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints data_storage_locations (MutableSequence[str]): For resources that have multiple storage locations, these are those regions. For Cloud Storage this is the list of @@ -11951,9 +12495,13 @@ class FileStoreDataProfile(proto.Message): The file store path. - Cloud Storage: ``gs://{bucket}`` + - Amazon S3: ``s3://{bucket}`` full_resource (str): The resource name of the resource profiled. https://cloud.google.com/apis/design/resource_names#full_resource_name + + Example format of an S3 bucket full resource name: + ``//cloudasset.googleapis.com/organizations/{org_id}/otherCloudConnections/aws/arn:aws:s3:::{bucket_name}`` config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): The snapshot of the configurations used to generate the profile. @@ -12322,6 +12870,7 @@ class ListFileStoreDataProfilesRequest(proto.Message): - Supported fields/values: - ``project_id`` - The Google Cloud project ID. + - ``account_id`` - The AWS account ID. - ``file_store_path`` - The path like "gs://bucket". - ``data_source_type`` - The profile's data source type, like "google/storage/bucket". @@ -13003,9 +13552,13 @@ class DataSourceType(proto.Message): Attributes: data_source (str): - Output only. An identifying string to the - type of resource being profiled. Current values: - google/bigquery/table, google/project + Output only. An identifying string to the type of resource + being profiled. 
Current values: + + - google/bigquery/table + - google/project + - google/sql/table + - google/gcs/bucket """ data_source: str = proto.Field( diff --git a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json index cb1c758e62fc..4da85d5c6cd9 100644 --- a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dlp", - "version": "3.22.0" + "version": "0.1.0" }, "snippets": [ { From e889809389c5b194ec77955664eb2859cde28d73 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 19:36:04 -0400 Subject: [PATCH 079/108] feat: [google-cloud-build] Add LEGACY_BUCKET option to DefaultLogsBucketBehavior (#13099) BEGIN_COMMIT_OVERRIDE feat: Add LEGACY_BUCKET option to DefaultLogsBucketBehavior docs: Sanitize docs END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. docs: Sanitize docs PiperOrigin-RevId: 677021009 Source-Link: https://github.com/googleapis/googleapis/commit/a18d9b2c3563527b26c4b713469e795b92795271 Source-Link: https://github.com/googleapis/googleapis-gen/commit/09d68f35365c74ad276cea3e7c26553a1485faa0 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJ1aWxkLy5Pd2xCb3QueWFtbCIsImgiOiIwOWQ2OGYzNTM2NWM3NGFkMjc2Y2VhM2U3YzI2NTUzYTE0ODVmYWEwIn0= --------- Co-authored-by: Owl Bot --- .../cloud/devtools/cloudbuild_v1/types/cloudbuild.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index a10715d0ffdf..995ae202614c 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -3278,7 +3278,7 @@ class LoggingMode(proto.Enum): NONE = 4 class DefaultLogsBucketBehavior(proto.Enum): - r"""Default GCS log bucket behavior options. + r"""Default Cloud Storage log bucket behavior options. Values: DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): @@ -3287,10 +3287,15 @@ class DefaultLogsBucketBehavior(proto.Enum): Bucket is located in user-owned project in the same region as the build. The builder service account must have access to create and - write to GCS buckets in the build project. + write to Cloud Storage buckets in the build + project. + LEGACY_BUCKET (2): + Bucket is located in a Google-owned project + and is not regionalized. """ DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 REGIONAL_USER_OWNED_BUCKET = 1 + LEGACY_BUCKET = 2 class PoolOption(proto.Message): r"""Details about how a build should be executed on a ``WorkerPool``. 
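The ``LEGACY_BUCKET`` value added above is a new member of ``BuildOptions.DefaultLogsBucketBehavior``. As a rough sketch only (not part of this patch), a caller running a library version that includes this change could opt a build into the legacy, Google-owned log bucket as shown below; the project ID and build step are placeholders.

.. code-block:: python

    from google.cloud.devtools import cloudbuild_v1

    client = cloudbuild_v1.CloudBuildClient()

    # Send build logs to the legacy, non-regionalized Google-owned bucket.
    build = cloudbuild_v1.Build(
        steps=[
            cloudbuild_v1.BuildStep(
                name="gcr.io/cloud-builders/gcloud",
                args=["version"],
            ),
        ],
        options=cloudbuild_v1.BuildOptions(
            default_logs_bucket_behavior=(
                cloudbuild_v1.BuildOptions.DefaultLogsBucketBehavior.LEGACY_BUCKET
            ),
        ),
    )

    # create_build returns a long-running operation; wait for it to finish.
    operation = client.create_build(project_id="my-project-id", build=build)
    print(operation.result().status)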
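Looking back at the ``google-cloud-dlp`` changes earlier in this series, the new other-cloud discovery messages compose roughly as sketched below. This is an illustration, not code from the patch: the organization ID, AWS account ID, and bucket name are placeholders, the optional cadence and conditions fields are omitted, and it assumes the existing ``DlpServiceClient.create_discovery_config`` method and ``DiscoveryConfig.Status`` enum are available in the installed client.

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    # Target a single Amazon S3 bucket via the new other-cloud discovery types.
    s3_target = dlp_v2.OtherCloudDiscoveryTarget(
        data_source_type=dlp_v2.DataSourceType(data_source="aws/s3/bucket"),
        filter=dlp_v2.DiscoveryOtherCloudFilter(
            single_resource=dlp_v2.OtherCloudSingleResourceReference(
                amazon_s3_bucket=dlp_v2.AmazonS3Bucket(
                    aws_account=dlp_v2.AwsAccount(account_id="123456789012"),
                    bucket_name="example-bucket",
                ),
            ),
        ),
    )

    discovery_config = dlp_v2.DiscoveryConfig(
        display_name="aws-s3-profiling",
        status=dlp_v2.DiscoveryConfig.Status.RUNNING,
        # Scope discovery to the same AWS account via the new starting location.
        other_cloud_starting_location=dlp_v2.OtherCloudDiscoveryStartingLocation(
            aws_location=dlp_v2.OtherCloudDiscoveryStartingLocation.AwsDiscoveryStartingLocation(
                account_id="123456789012",
            ),
        ),
        targets=[dlp_v2.DiscoveryTarget(other_cloud_target=s3_target)],
    )

    response = client.create_discovery_config(
        parent="organizations/ORG_ID/locations/global",
        discovery_config=discovery_config,
    )
    print(response.name)

As the ``OtherCloudDiscoveryTarget`` docstring above notes, an AWS connector in Security Command Center (Enterprise) must already be configured for these profiles to be generated.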
From d6238e49a17caf54dd0fbc45215527beed057cc5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 19:44:15 -0400 Subject: [PATCH 080/108] feat: [google-ai-generativelanguage] Add GoogleSearchRetrieval tool and candidate.grounding_metadata (#13098) BEGIN_COMMIT_OVERRIDE feat: Add GoogleSearchRetrieval tool and candidate.grounding_metadata feat: Add Schema.min_items feat: Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} feat: Add PredictionService (for Imagen) feat: Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY feat: Add HarmBlockThreshold.OFF feat: Add TunedModels.reader_project_numbers docs: Small fixes docs: Tag HarmCategories by the model family they're used on. END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: Add Schema.min_items feat: Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} feat: Add PredictionService (for Imagen) feat: Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY feat: Add HarmBlockThreshold.OFF feat: Add TunedModels.reader_project_numbers docs: Small fixes docs: Tag HarmCategories by the model family they're used on. PiperOrigin-RevId: 676982731 Source-Link: https://github.com/googleapis/googleapis/commit/979f71cfaea54d9bc03543647da4392f052c801e Source-Link: https://github.com/googleapis/googleapis-gen/commit/f5e280f1d6258abe58a0b910102e97ce8d82d948 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiZjVlMjgwZjFkNjI1OGFiZTU4YTBiOTEwMTAyZTk3Y2U4ZDgyZDk0OCJ9 BEGIN_NESTED_COMMIT feat: [google-ai-generativelanguage] Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} feat: Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY docs: Tag HarmCategories by the model family they're used on. 
feat: Add HarmBlockThreshold.OFF docs: Small fixes PiperOrigin-RevId: 676980561 Source-Link: https://github.com/googleapis/googleapis/commit/1cb097eee9a3f9210911063649eb0d2c3b023596 Source-Link: https://github.com/googleapis/googleapis-gen/commit/04d3969939b2bf4e3db9f1aef88d18a9bedc8ae7 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFpLWdlbmVyYXRpdmVsYW5ndWFnZS8uT3dsQm90LnlhbWwiLCJoIjoiMDRkMzk2OTkzOWIyYmY0ZTNkYjlmMWFlZjg4ZDE4YTliZWRjOGFlNyJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../prediction_service.rst | 6 + .../generativelanguage_v1beta/services_.rst | 1 + .../google/ai/generativelanguage/__init__.py | 32 + .../ai/generativelanguage/gapic_version.py | 2 +- .../ai/generativelanguage_v1/__init__.py | 2 + .../ai/generativelanguage_v1/gapic_version.py | 2 +- .../generative_service/transports/rest.py | 4 + .../services/model_service/transports/rest.py | 4 + .../generativelanguage_v1/types/__init__.py | 2 + .../types/generative_service.py | 161 + .../ai/generativelanguage_v1/types/safety.py | 37 +- .../ai/generativelanguage_v1beta/__init__.py | 27 + .../gapic_metadata.json | 34 + .../gapic_version.py | 2 +- .../services/prediction_service/__init__.py | 22 + .../prediction_service/async_client.py | 391 +++ .../services/prediction_service/client.py | 814 ++++++ .../prediction_service/transports/__init__.py | 36 + .../prediction_service/transports/base.py | 165 ++ .../prediction_service/transports/grpc.py | 274 ++ .../transports/grpc_asyncio.py | 285 ++ .../prediction_service/transports/rest.py | 313 ++ .../types/__init__.py | 21 + .../types/content.py | 93 +- .../types/generative_service.py | 394 ++- .../types/prediction_service.py | 79 + .../generativelanguage_v1beta/types/safety.py | 37 +- .../types/tuned_model.py | 7 + .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- ...erated_prediction_service_predict_async.py | 56 + ...nerated_prediction_service_predict_sync.py | 56 + ...adata_google.ai.generativelanguage.v1.json | 2 +- ...a_google.ai.generativelanguage.v1beta.json | 171 +- ..._google.ai.generativelanguage.v1beta2.json | 2 +- ..._google.ai.generativelanguage.v1beta3.json | 2 +- ...ixup_generativelanguage_v1beta_keywords.py | 1 + .../test_cache_service.py | 8 + .../test_model_service.py | 16 + .../test_prediction_service.py | 2586 +++++++++++++++++ 40 files changed, 6100 insertions(+), 51 deletions(-) create mode 100644 packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py create mode 100644 
packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py create mode 100644 packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py create mode 100644 packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py create mode 100644 packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py create mode 100644 packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst new file mode 100644 index 000000000000..7b2b932acacc --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst @@ -0,0 +1,6 @@ +PredictionService +----------------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta.services.prediction_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst index 24e6184e8b0e..7a7b5429bd6f 100644 --- a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst @@ -9,5 +9,6 @@ Services for Google Ai Generativelanguage v1beta API generative_service model_service permission_service + prediction_service retriever_service text_service diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py index c69803d506d6..750b54051c3f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -54,6 +54,12 @@ from google.ai.generativelanguage_v1beta.services.permission_service.client import ( PermissionServiceClient, ) +from google.ai.generativelanguage_v1beta.services.prediction_service.async_client import ( + PredictionServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta.services.prediction_service.client import ( + PredictionServiceClient, +) from google.ai.generativelanguage_v1beta.services.retriever_service.async_client import ( RetrieverServiceAsyncClient, ) @@ -84,12 +90,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -132,6 +140,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -159,6 +174,10 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from google.ai.generativelanguage_v1beta.types.prediction_service import ( + PredictRequest, + PredictResponse, +) from google.ai.generativelanguage_v1beta.types.retriever import ( Chunk, ChunkData, @@ -243,6 +262,8 @@ "ModelServiceAsyncClient", "PermissionServiceClient", 
"PermissionServiceAsyncClient", + "PredictionServiceClient", + "PredictionServiceAsyncClient", "RetrieverServiceClient", "RetrieverServiceAsyncClient", "TextServiceClient", @@ -260,12 +281,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -303,6 +326,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -325,6 +355,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py index 4c8665b1b49a..a383f98f4342 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py @@ -37,6 +37,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .types.model import Model @@ -66,6 +67,7 @@ "HarmCategory", "ListModelsRequest", "ListModelsResponse", + "LogprobsResult", "Model", "ModelServiceClient", "Part", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py index 7498cba8d7e3..7caa772eb19f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py @@ -1057,6 +1057,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py index e21bd17d6e8b..a431622cc869 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py @@ -572,6 +572,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py index 522ecb07c1c3..9156b856ee0e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py @@ -27,6 +27,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .model import Model @@ -50,6 +51,7 @@ "GenerateContentRequest", "GenerateContentResponse", "GenerationConfig", + "LogprobsResult", "TaskType", "Model", "GetModelRequest", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py index e19c5b166abc..e8062906bfbf 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -31,6 +31,7 @@ "GenerationConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "EmbedContentRequest", "ContentEmbedding", "EmbedContentResponse", @@ -228,6 +229,58 @@ class GenerationConfig(proto.Message): doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_k``. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependant on the + number of times the token is used (after the first). 
Use + [frequency_penalty][google.ai.generativelanguage.v1.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. + frequency_penalty (float): + Optional. Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the respponse so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + dificult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. + + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. """ candidate_count: int = proto.Field( @@ -259,6 +312,26 @@ class GenerationConfig(proto.Message): number=7, optional=True, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class GenerateContentResponse(proto.Message): @@ -414,6 +487,11 @@ class Candidate(proto.Message): foundational LLM's training data. token_count (int): Output only. Token count for this candidate. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -494,6 +572,89 @@ class FinishReason(proto.Enum): proto.INT32, number=7, ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. 
+ """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. + + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. + """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class EmbedContentRequest(proto.Message): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py index 100fc75977da..2a75fd715410 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py @@ -39,31 +39,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. + **PaLM** - Content that is rude, disrespectful, or profane. HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. + **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. 
+ HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -76,6 +77,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class SafetyRating(proto.Message): @@ -170,12 +172,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. """ HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py index c692fa7725c9..73da8c53fefc 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py @@ -30,6 +30,10 @@ PermissionServiceAsyncClient, PermissionServiceClient, ) +from .services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, +) from .services.retriever_service import ( RetrieverServiceAsyncClient, RetrieverServiceClient, @@ -50,12 +54,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -98,6 +104,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -125,6 +138,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .types.prediction_service import PredictRequest, PredictResponse from .types.retriever import ( Chunk, ChunkData, @@ -203,6 +217,7 @@ "GenerativeServiceAsyncClient", "ModelServiceAsyncClient", "PermissionServiceAsyncClient", + "PredictionServiceAsyncClient", "RetrieverServiceAsyncClient", "TextServiceAsyncClient", "AttributionSourceId", @@ -256,6 +271,7 @@ "DeleteTunedModelRequest", "DiscussServiceClient", "Document", + "DynamicRetrievalConfig", "EmbedContentRequest", "EmbedContentResponse", "EmbedTextRequest", @@ -288,9 +304,13 @@ "GetModelRequest", "GetPermissionRequest", "GetTunedModelRequest", + "GoogleSearchRetrieval", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", "GroundingPassage", "GroundingPassages", + "GroundingSupport", "HarmCategory", "Hyperparameters", "ListCachedContentsRequest", @@ -309,6 +329,7 @@ "ListPermissionsResponse", "ListTunedModelsRequest", "ListTunedModelsResponse", + "LogprobsResult", "Message", "MessagePrompt", "MetadataFilter", @@ -317,16 +338,22 @@ "Part", "Permission", "PermissionServiceClient", + "PredictRequest", + "PredictResponse", + "PredictionServiceClient", "QueryCorpusRequest", "QueryCorpusResponse", "QueryDocumentRequest", "QueryDocumentResponse", "RelevantChunk", + "RetrievalMetadata", "RetrieverServiceClient", "SafetyFeedback", "SafetyRating", "SafetySetting", "Schema", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "StringList", "TaskType", diff --git 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json index 24a3b2565007..7fd1909f6ca0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json @@ -569,6 +569,40 @@ } } }, + "PredictionService": { + "clients": { + "grpc": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PredictionServiceAsyncClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "rest": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + } + } + }, "RetrieverService": { "clients": { "grpc": { diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py new file mode 100644 index 000000000000..6c64cf5ad1c0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import PredictionServiceAsyncClient +from .client import PredictionServiceClient + +__all__ = ( + "PredictionServiceClient", + "PredictionServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py new file mode 100644 index 000000000000..f9e04e3e2aea --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .client import PredictionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport + + +class PredictionServiceAsyncClient: + """A service for online predictions and explanations.""" + + _client: PredictionServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PredictionServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(PredictionServiceClient.model_path) + parse_model_path = staticmethod(PredictionServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + PredictionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PredictionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PredictionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PredictionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PredictionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PredictionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PredictionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PredictionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PredictionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PredictionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. + """ + return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. + """ + return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PredictionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = PredictionServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PredictionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (:class:`str`): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (:class:`MutableSequence[google.protobuf.struct_pb2.Value]`): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "PredictionServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py new file mode 100644 index 000000000000..48736239098d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py @@ -0,0 +1,814 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc import PredictionServiceGrpcTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .transports.rest import PredictionServiceRestTransport + + +class PredictionServiceClientMeta(type): + """Metaclass for the PredictionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
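+
+    For example (illustrative only; ``"rest"`` is one of the registered
+    transport labels), a concrete transport class can be looked up with::
+
+        transport_cls = PredictionServiceClient.get_transport_class("rest")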
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PredictionServiceTransport]] + _transport_registry["grpc"] = PredictionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = PredictionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PredictionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PredictionServiceClient(metaclass=PredictionServiceClientMeta): + """A service for online predictions and explanations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = PredictionServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or PredictionServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. 
+ + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PredictionServiceClient._read_environment_variables() + self._client_cert_source = PredictionServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PredictionServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PredictionServiceTransport) + if transport_provided: + # transport is a PredictionServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(PredictionServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or PredictionServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PredictionServiceTransport], + Callable[..., PredictionServiceTransport], + ] = ( + PredictionServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PredictionServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances is not None: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PredictionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py new file mode 100644 index 000000000000..d6d645ba1ff1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport +from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .rest import PredictionServiceRestInterceptor, PredictionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] +_transport_registry["grpc"] = PredictionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PredictionServiceRestTransport + +__all__ = ( + "PredictionServiceTransport", + "PredictionServiceGrpcTransport", + "PredictionServiceGrpcAsyncIOTransport", + "PredictionServiceRestTransport", + "PredictionServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py new file mode 100644 index 000000000000..1b36658ad423 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import prediction_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PredictionServiceTransport(abc.ABC): + """Abstract transport class for PredictionService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.predict: gapic_v1.method.wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
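+
+        A sketch of the usual pattern that avoids calling this method directly
+        (illustrative; assumes application default credentials are available)::
+
+            from google.ai import generativelanguage_v1beta
+
+            with generativelanguage_v1beta.PredictionServiceClient() as client:
+                ...  # requests made here; the transport is closed on exit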
+ """ + raise NotImplementedError() + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Union[ + prediction_service.PredictResponse, + Awaitable[prediction_service.PredictResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PredictionServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py new file mode 100644 index 000000000000..285c2ff8af46 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport + + +class PredictionServiceGrpcTransport(PredictionServiceTransport): + """gRPC backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + r"""Return a callable for the predict method over gRPC. 
+ + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + ~.PredictResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PredictionServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..1348f51f6706 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport + + +class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport): + """gRPC AsyncIO backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
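+
+    A minimal construction sketch (illustrative; credentials are resolved from
+    the environment, for example via application default credentials)::
+
+        from google.ai import generativelanguage_v1beta
+
+        client = generativelanguage_v1beta.PredictionServiceAsyncClient(
+            transport="grpc_asyncio",
+        )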
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Awaitable[prediction_service.PredictResponse], + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + Awaitable[~.PredictResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.predict: gapic_v1.method_async.wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("PredictionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py new file mode 100644 index 000000000000..0fd462caa988 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PredictionServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PredictionServiceRestInterceptor: + """Interceptor for PredictionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PredictionServiceRestTransport. + + .. code-block:: python + class MyCustomPredictionServiceInterceptor(PredictionServiceRestInterceptor): + def pre_predict(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_predict(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PredictionServiceRestTransport(interceptor=MyCustomPredictionServiceInterceptor()) + client = PredictionServiceClient(transport=transport) + + + """ + + def pre_predict( + self, + request: prediction_service.PredictRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[prediction_service.PredictRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for predict + + Override in a subclass to manipulate the request or metadata + before they are sent to the PredictionService server. + """ + return request, metadata + + def post_predict( + self, response: prediction_service.PredictResponse + ) -> prediction_service.PredictResponse: + """Post-rpc interceptor for predict + + Override in a subclass to manipulate the response + after it is returned by the PredictionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PredictionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PredictionServiceRestInterceptor + + +class PredictionServiceRestTransport(PredictionServiceTransport): + """REST backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PredictionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PredictionServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Predict(PredictionServiceRestStub): + def __hash__(self): + return hash("Predict") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: prediction_service.PredictRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Call the predict method over HTTP. + + Args: + request (~.prediction_service.PredictRequest): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.prediction_service.PredictResponse: + Response message for [PredictionService.Predict]. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:predict", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_predict(request, metadata) + pb_request = prediction_service.PredictRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = prediction_service.PredictResponse() + pb_resp = prediction_service.PredictResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_predict(resp) + return resp + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
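# Illustrative sketch, not part of the generated patch: wiring a custom interceptor
# into the REST transport, following the pattern shown in the interceptor docstring
# above. The header name and model are placeholders.
from google.ai import generativelanguage_v1beta
from google.ai.generativelanguage_v1beta.services.prediction_service.transports.rest import (
    PredictionServiceRestInterceptor,
    PredictionServiceRestTransport,
)


class HeaderInjectingInterceptor(PredictionServiceRestInterceptor):
    def pre_predict(self, request, metadata):
        # Append one extra metadata entry before the request is sent.
        return request, list(metadata) + [("x-example-header", "1")]


transport = PredictionServiceRestTransport(interceptor=HeaderInjectingInterceptor())
client = generativelanguage_v1beta.PredictionServiceClient(transport=transport)
response = client.predict(request={"model": "models/example-model"})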
+ # In C++ this would require a dynamic_cast + return self._Predict(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PredictionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py index 89b4f8ad01b8..9dd7a564142d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py @@ -28,12 +28,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -76,6 +78,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -103,6 +112,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .prediction_service import PredictRequest, PredictResponse from .retriever import ( Chunk, ChunkData, @@ -188,12 +198,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -231,6 +243,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -253,6 +272,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py index bbdbf7f24bc8..6b5d37cd15ce 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -31,6 +31,8 @@ "ExecutableCode", "CodeExecutionResult", "Tool", + "GoogleSearchRetrieval", + "DynamicRetrievalConfig", "CodeExecution", "ToolConfig", "FunctionCallingConfig", @@ -354,14 +356,18 @@ class Tool(proto.Message): The model or system does not execute the function. Instead the defined function may be returned as a - [FunctionCall][content.part.function_call] with arguments to - the client side for execution. The model may decide to call - a subset of these functions by populating - [FunctionCall][content.part.function_call] in the response. - The next conversation turn may contain a - [FunctionResponse][content.part.function_response] with the - [content.role] "function" generation context for the next - model turn. + [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + with arguments to the client side for execution. 
The model + may decide to call a subset of these functions by populating + [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + in the response. The next conversation turn may contain a + [FunctionResponse][google.ai.generativelanguage.v1beta.Part.function_response] + with the + [Content.role][google.ai.generativelanguage.v1beta.Content.role] + "function" generation context for the next model turn. + google_search_retrieval (google.ai.generativelanguage_v1beta.types.GoogleSearchRetrieval): + Optional. Retrieval tool that is powered by + Google search. code_execution (google.ai.generativelanguage_v1beta.types.CodeExecution): Optional. Enables the model to execute code as part of generation. @@ -372,6 +378,11 @@ class Tool(proto.Message): number=1, message="FunctionDeclaration", ) + google_search_retrieval: "GoogleSearchRetrieval" = proto.Field( + proto.MESSAGE, + number=2, + message="GoogleSearchRetrieval", + ) code_execution: "CodeExecution" = proto.Field( proto.MESSAGE, number=3, @@ -379,6 +390,65 @@ class Tool(proto.Message): ) +class GoogleSearchRetrieval(proto.Message): + r"""Tool to retrieve public web data for grounding, powered by + Google. + + Attributes: + dynamic_retrieval_config (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig): + Specifies the dynamic retrieval configuration + for the given source. + """ + + dynamic_retrieval_config: "DynamicRetrievalConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="DynamicRetrievalConfig", + ) + + +class DynamicRetrievalConfig(proto.Message): + r"""Describes the options to customize dynamic retrieval. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig.Mode): + The mode of the predictor to be used in + dynamic retrieval. + dynamic_threshold (float): + The threshold to be used in dynamic + retrieval. If not set, a system default value is + used. + + This field is a member of `oneof`_ ``_dynamic_threshold``. + """ + + class Mode(proto.Enum): + r"""The mode of the predictor to be used in dynamic retrieval. + + Values: + MODE_UNSPECIFIED (0): + Always trigger retrieval. + MODE_DYNAMIC (1): + Run retrieval only when system decides it is + necessary. + """ + MODE_UNSPECIFIED = 0 + MODE_DYNAMIC = 1 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + dynamic_threshold: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class CodeExecution(proto.Message): r"""Tool that executes code generated by the model, and automatically returns the result to the model. @@ -608,6 +678,9 @@ class Schema(proto.Message): max_items (int): Optional. Maximum number of the elements for Type.ARRAY. + min_items (int): + Optional. Minimum number of the elements for + Type.ARRAY. properties (MutableMapping[str, google.ai.generativelanguage_v1beta.types.Schema]): Optional. Properties of Type.OBJECT. 
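# Illustrative sketch, not part of the generated patch: enabling the new Google Search
# retrieval tool with a dynamic-retrieval threshold. The model name and prompt are
# placeholders.
from google.ai import generativelanguage_v1beta as glm

search_tool = glm.Tool(
    google_search_retrieval=glm.GoogleSearchRetrieval(
        dynamic_retrieval_config=glm.DynamicRetrievalConfig(
            mode=glm.DynamicRetrievalConfig.Mode.MODE_DYNAMIC,
            # Compared against the dynamic retrieval score to decide whether to search.
            dynamic_threshold=0.7,
        )
    )
)

client = glm.GenerativeServiceClient()
response = client.generate_content(
    request=glm.GenerateContentRequest(
        model="models/example-model",
        contents=[glm.Content(parts=[glm.Part(text="Who won the most recent World Cup?")])],
        tools=[search_tool],
    )
)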
required (MutableSequence[str]): @@ -645,6 +718,10 @@ class Schema(proto.Message): proto.INT64, number=21, ) + min_items: int = proto.Field( + proto.INT64, + number=22, + ) properties: MutableMapping[str, "Schema"] = proto.MapField( proto.STRING, proto.MESSAGE, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py index b31b07aa4299..edc4c8ec0ff8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -32,8 +32,15 @@ "SemanticRetrieverConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "AttributionSourceId", "GroundingAttribution", + "RetrievalMetadata", + "GroundingMetadata", + "SearchEntryPoint", + "GroundingChunk", + "Segment", + "GroundingSupport", "GenerateAnswerRequest", "GenerateAnswerResponse", "EmbedContentRequest", @@ -289,7 +296,8 @@ class GenerationConfig(proto.Message): Optional. MIME type of the generated candidate text. Supported MIME types are: ``text/plain``: (default) Text output. ``application/json``: JSON response in the response - candidates. Refer to the + candidates. ``text/x.enum``: ENUM as a string response in + the response candidates. Refer to the `docs `__ for a list of all supported text MIME types. response_schema (google.ai.generativelanguage_v1beta.types.Schema): @@ -303,6 +311,58 @@ class GenerationConfig(proto.Message): JSON response. Refer to the `JSON text generation guide `__ for more details. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependant on the + number of times the token is used (after the first). Use + [frequency_penalty][google.ai.generativelanguage.v1beta.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. + frequency_penalty (float): + Optional. Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the respponse so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + dificult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1beta.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. 
+ + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1beta.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1beta.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. """ candidate_count: int = proto.Field( @@ -343,6 +403,26 @@ class GenerationConfig(proto.Message): number=14, message=gag_content.Schema, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class SemanticRetrieverConfig(proto.Message): @@ -565,6 +645,15 @@ class Candidate(proto.Message): contributed to a grounded answer. This field is populated for ``GenerateAnswer`` calls. + grounding_metadata (google.ai.generativelanguage_v1beta.types.GroundingMetadata): + Output only. Grounding metadata for the candidate. + + This field is populated for ``GenerateContent`` calls. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1beta.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -652,6 +741,94 @@ class FinishReason(proto.Enum): number=8, message="GroundingAttribution", ) + grounding_metadata: "GroundingMetadata" = proto.Field( + proto.MESSAGE, + number=9, + message="GroundingMetadata", + ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. + """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. + + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. 
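# Illustrative sketch, not part of the generated patch: requesting log probabilities
# and token penalties with the new GenerationConfig fields, then reading the
# LogprobsResult from the first candidate. The model name is a placeholder.
from google.ai import generativelanguage_v1beta as glm

client = glm.GenerativeServiceClient()
response = client.generate_content(
    request=glm.GenerateContentRequest(
        model="models/example-model",
        contents=[glm.Content(parts=[glm.Part(text="Name three primary colors.")])],
        generation_config=glm.GenerationConfig(
            presence_penalty=0.5,    # flat penalty once a token has appeared at all
            frequency_penalty=0.2,   # penalty grows with each repeated use of a token
            response_logprobs=True,  # populate Candidate.logprobs_result
            logprobs=3,              # top alternatives to report per decoding step
        ),
    )
)

candidate = response.candidates[0]
print("average log probability:", candidate.avg_logprobs)
for step, top in enumerate(candidate.logprobs_result.top_candidates):
    chosen = candidate.logprobs_result.chosen_candidates[step]
    alternatives = [(c.token, c.log_probability) for c in top.candidates]
    print(step, chosen.token, alternatives)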
+ """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class AttributionSourceId(proto.Message): @@ -758,6 +935,219 @@ class GroundingAttribution(proto.Message): ) +class RetrievalMetadata(proto.Message): + r"""Metadata related to retrieval in the grounding flow. + + Attributes: + google_search_dynamic_retrieval_score (float): + Optional. Score indicating how likely information from + google search could help answer the prompt. The score is in + the range [0, 1], where 0 is the least likely and 1 is the + most likely. This score is only populated when google search + grounding and dynamic retrieval is enabled. It will be + compared to the threshold to determine whether to trigger + google search. + """ + + google_search_dynamic_retrieval_score: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class GroundingMetadata(proto.Message): + r"""Metadata returned to client when grounding is enabled. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_entry_point (google.ai.generativelanguage_v1beta.types.SearchEntryPoint): + Optional. Google search entry for the + following-up web searches. + + This field is a member of `oneof`_ ``_search_entry_point``. + grounding_chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingChunk]): + List of supporting references retrieved from + specified grounding source. + grounding_supports (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingSupport]): + List of grounding support. + retrieval_metadata (google.ai.generativelanguage_v1beta.types.RetrievalMetadata): + Metadata related to retrieval in the + grounding flow. + + This field is a member of `oneof`_ ``_retrieval_metadata``. + """ + + search_entry_point: "SearchEntryPoint" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="SearchEntryPoint", + ) + grounding_chunks: MutableSequence["GroundingChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="GroundingChunk", + ) + grounding_supports: MutableSequence["GroundingSupport"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="GroundingSupport", + ) + retrieval_metadata: "RetrievalMetadata" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="RetrievalMetadata", + ) + + +class SearchEntryPoint(proto.Message): + r"""Google search entry point. + + Attributes: + rendered_content (str): + Optional. Web content snippet that can be + embedded in a web page or an app webview. + sdk_blob (bytes): + Optional. Base64 encoded JSON representing + array of tuple. + """ + + rendered_content: str = proto.Field( + proto.STRING, + number=1, + ) + sdk_blob: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class GroundingChunk(proto.Message): + r"""Grounding chunk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + web (google.ai.generativelanguage_v1beta.types.GroundingChunk.Web): + Grounding chunk from the web. + + This field is a member of `oneof`_ ``chunk_type``. + """ + + class Web(proto.Message): + r"""Chunk from the web. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + URI reference of the chunk. + + This field is a member of `oneof`_ ``_uri``. + title (str): + Title of the chunk. + + This field is a member of `oneof`_ ``_title``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + title: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + web: Web = proto.Field( + proto.MESSAGE, + number=1, + oneof="chunk_type", + message=Web, + ) + + +class Segment(proto.Message): + r"""Segment of the content. + + Attributes: + part_index (int): + Output only. The index of a Part object + within its parent Content object. + start_index (int): + Output only. Start index in the given Part, + measured in bytes. Offset from the start of the + Part, inclusive, starting at zero. + end_index (int): + Output only. End index in the given Part, + measured in bytes. Offset from the start of the + Part, exclusive, starting at zero. + text (str): + Output only. The text corresponding to the + segment from the response. + """ + + part_index: int = proto.Field( + proto.INT32, + number=1, + ) + start_index: int = proto.Field( + proto.INT32, + number=2, + ) + end_index: int = proto.Field( + proto.INT32, + number=3, + ) + text: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GroundingSupport(proto.Message): + r"""Grounding support. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.ai.generativelanguage_v1beta.types.Segment): + Segment of the content this support belongs + to. + + This field is a member of `oneof`_ ``_segment``. + grounding_chunk_indices (MutableSequence[int]): + A list of indices (into 'grounding_chunk') specifying the + citations associated with the claim. For instance [1,3,4] + means that grounding_chunk[1], grounding_chunk[3], + grounding_chunk[4] are the retrieved content attributed to + the claim. + confidence_scores (MutableSequence[float]): + Confidence score of the support references. Ranges from 0 to + 1. 1 is the most confident. This list must have the same + size as the grounding_chunk_indices. + """ + + segment: "Segment" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Segment", + ) + grounding_chunk_indices: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + confidence_scores: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=3, + ) + + class GenerateAnswerRequest(proto.Message): r"""Request to generate a grounded answer from the ``Model``. @@ -1207,7 +1597,7 @@ class CountTokensResponse(proto.Message): ``prompt`` into. Always non-negative. cached_content_token_count (int): Number of tokens in the cached part of the - prompt, i.e. in the cached content. + prompt (the cached content). 
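# Illustrative sketch, not part of the generated patch: inspecting the grounding
# metadata attached to a candidate when Google Search grounding is enabled (see the
# search-tool sketch earlier). The response is assumed to come from such a call.
from google.ai import generativelanguage_v1beta as glm


def summarize_grounding(response: glm.GenerateContentResponse) -> None:
    metadata = response.candidates[0].grounding_metadata

    # Dynamic-retrieval score that was compared against the configured threshold.
    print(metadata.retrieval_metadata.google_search_dynamic_retrieval_score)

    # Which retrieved chunks support which spans of the answer, and how confidently.
    for support in metadata.grounding_supports:
        sources = [
            metadata.grounding_chunks[i].web.uri for i in support.grounding_chunk_indices
        ]
        print(support.segment.text, "->", sources, list(support.confidence_scores))

    # HTML snippet that can be embedded to render the search entry point.
    print(metadata.search_entry_point.rendered_content)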
""" total_tokens: int = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py new file mode 100644 index 000000000000..b6a659782edf --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "PredictRequest", + "PredictResponse", + }, +) + + +class PredictRequest(proto.Message): + r"""Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + + Attributes: + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the input to + the prediction call. + parameters (google.protobuf.struct_pb2.Value): + Optional. The parameters that govern the + prediction call. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + instances: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + parameters: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Value, + ) + + +class PredictResponse(proto.Message): + r"""Response message for [PredictionService.Predict]. + + Attributes: + predictions (MutableSequence[google.protobuf.struct_pb2.Value]): + The outputs of the prediction call. + """ + + predictions: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py index 113590701d4b..8ede1042a0ac 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py @@ -41,31 +41,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. + **PaLM** - Content that is rude, disrespectful, or profane. 
HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. + **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -78,6 +79,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class ContentFilter(proto.Message): @@ -249,12 +251,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. """ HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py index a45283f33632..3be0f7e6b586 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py @@ -118,6 +118,9 @@ class TunedModel(proto.Message): tuning_task (google.ai.generativelanguage_v1beta.types.TuningTask): Required. The tuning task that creates the tuned model. + reader_project_numbers (MutableSequence[int]): + Optional. List of project numbers that have + read access to the tuned model. """ class State(proto.Enum): @@ -196,6 +199,10 @@ class State(proto.Enum): number=10, message="TuningTask", ) + reader_project_numbers: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=14, + ) class TunedModelSource(proto.Message): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
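# Illustrative sketch, not part of the generated patch: using the new civic-integrity
# category and the new OFF threshold. Assumes HarmBlockThreshold remains nested on
# SafetySetting as in the existing generated code; the model name is a placeholder.
from google.ai import generativelanguage_v1beta as glm

safety_settings = [
    glm.SafetySetting(
        category=glm.HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY,
        threshold=glm.SafetySetting.HarmBlockThreshold.BLOCK_ONLY_HIGH,
    ),
    glm.SafetySetting(
        category=glm.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
        threshold=glm.SafetySetting.HarmBlockThreshold.OFF,  # turn this filter off entirely
    ),
]

client = glm.GenerativeServiceClient()
response = client.generate_content(
    request=glm.GenerateContentRequest(
        model="models/example-model",
        contents=[glm.Content(parts=[glm.Part(text="Summarize this week's election news.")])],
        safety_settings=safety_settings,
    )
)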
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 1699c98da708..558c8aab67c5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py new file mode 100644 index 000000000000..851ebfa44e4d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py new file mode 100644 index 000000000000..ade0be26d986 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
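# Illustrative sketch, not part of the generated patch: the generated samples above
# only set a null value; in practice each instance is a google.protobuf.Value and can
# be built from a plain dict. The model name and dict keys are placeholders.
from google.ai import generativelanguage_v1beta as glm
from google.protobuf import json_format, struct_pb2

instance = json_format.ParseDict(
    {"prompt": "A watercolor painting of a lighthouse"}, struct_pb2.Value()
)
parameters = json_format.ParseDict({"sampleCount": 2}, struct_pb2.Value())

client = glm.PredictionServiceClient()
response = client.predict(
    request=glm.PredictRequest(
        model="models/example-model",
        instances=[instance],
        parameters=parameters,
    )
)
for prediction in response.predictions:
    print(json_format.MessageToDict(prediction))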
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index dcb6ad5e6a9e..d6c3fe4c5051 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index a6e1502bce78..c418dfa10386 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { @@ -4953,6 +4953,175 @@ ], "title": "generativelanguage_v1beta_generated_permission_service_update_permission_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient", + "shortName": "PredictionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1beta_generated_prediction_service_predict_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1beta_generated_PredictionService_Predict_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient", + "shortName": "PredictionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PredictionService_Predict_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index d3fc92d09eaa..5b7d0a0509b4 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index d9c470b9e07c..91de9e353f90 100644 --- 
a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py index dcb5cdfbb55c..8e69225c75de 100644 --- a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py @@ -83,6 +83,7 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'list_models': ('page_size', 'page_token', ), 'list_permissions': ('parent', 'page_size', 'page_token', ), 'list_tuned_models': ('page_size', 'page_token', 'filter', ), + 'predict': ('model', 'instances', 'parameters', ), 'query_corpus': ('name', 'query', 'metadata_filters', 'results_count', ), 'query_document': ('name', 'query', 'results_count', 'metadata_filters', ), 'stream_generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py index 46ecbbbaeafa..1cc7b01d0c19 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -3349,11 +3349,15 @@ def test_create_cached_content_rest(request_type): "enum": ["enum_value1", "enum_value2"], "items": {}, "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], @@ -4097,11 +4101,15 @@ def test_update_cached_content_rest(request_type): "enum": ["enum_value1", "enum_value2"], "items": {}, "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index c8b4aed2becb..93ee6b5f5eb4 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -2059,6 +2059,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.get_tuned_model(request) @@ -2078,6 +2079,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) 
assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_empty_call(): @@ -2183,6 +2185,7 @@ async def test_get_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model() @@ -2258,6 +2261,7 @@ async def test_get_tuned_model_async( top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model(request) @@ -2277,6 +2281,7 @@ async def test_get_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -3346,6 +3351,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.update_tuned_model(request) @@ -3365,6 +3371,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_empty_call(): @@ -3476,6 +3483,7 @@ async def test_update_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model() @@ -3553,6 +3561,7 @@ async def test_update_tuned_model_async( top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model(request) @@ -3572,6 +3581,7 @@ async def test_update_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -4757,6 +4767,7 @@ def test_get_tuned_model_rest(request_type): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -4780,6 +4791,7 @@ def test_get_tuned_model_rest(request_type): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_rest_use_cached_wrapped_rpc(): @@ -5372,6 +5384,7 @@ def test_create_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5784,6 +5797,7 @@ def test_update_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
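# Illustrative sketch, not part of the generated patch: granting additional projects
# read access to a tuned model via the new reader_project_numbers field. Assumes the
# flattened tuned_model/update_mask parameters generated for update_tuned_model; the
# resource name and project number are placeholders.
from google.ai import generativelanguage_v1beta as glm
from google.protobuf import field_mask_pb2

client = glm.ModelServiceClient()
tuned_model = glm.TunedModel(
    name="tunedModels/example-tuned-model",
    reader_project_numbers=[123456789012],
)
updated = client.update_tuned_model(
    tuned_model=tuned_model,
    update_mask=field_mask_pb2.FieldMask(paths=["reader_project_numbers"]),
)
print(list(updated.reader_project_numbers))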
# Delete any fields which are not present in the current runtime dependency @@ -5865,6 +5879,7 @@ def get_message_fields(field): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -5888,6 +5903,7 @@ def get_message_fields(field): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py new file mode 100644 index 000000000000..5532205c7f0a --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py @@ -0,0 +1,2586 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, + transports, +) +from google.ai.generativelanguage_v1beta.types import prediction_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PredictionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + PredictionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PredictionServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + PredictionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PredictionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PredictionServiceClient._get_client_cert_source(None, False) is None + assert ( + PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, 
True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PredictionServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PredictionServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PredictionServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PredictionServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PredictionServiceClient._get_universe_domain(None, None) + == PredictionServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PredictionServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), + ], +) +def test_prediction_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PredictionServiceGrpcTransport, "grpc"), + (transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PredictionServiceRestTransport, "rest"), + ], +) +def test_prediction_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), + ], +) +def test_prediction_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com/" + ) + + +def test_prediction_service_client_get_transport_class(): + transport = PredictionServiceClient.get_transport_class() + available_transports = [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceRestTransport, + ] + assert transport in available_transports + + transport = PredictionServiceClient.get_transport_class("grpc") + assert transport == transports.PredictionServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "true", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "false", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_prediction_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceAsyncClient), +) +def test_prediction_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +def test_prediction_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_prediction_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_prediction_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PredictionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_prediction_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + prediction_service.PredictRequest, + dict, + ], +) +def test_predict(request_type, transport: str = "grpc"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + response = client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +def test_predict_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() + + +def test_predict_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = prediction_service.PredictRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.predict(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest( + model="model_value", + ) + + +def test_predict_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.predict in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.predict] = mock_rpc + request = {} + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() + + +@pytest.mark.asyncio +async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.predict + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.predict + ] = mock_rpc + + request = {} + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_async( + transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest +): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +@pytest.mark.asyncio +async def test_predict_async_from_dict(): + await test_predict_async(request_type=dict) + + +def test_predict_field_headers(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = prediction_service.PredictResponse() + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_predict_field_headers_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_predict_flattened(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +def test_predict_flattened_error(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +@pytest.mark.asyncio +async def test_predict_flattened_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_predict_flattened_error_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + prediction_service.PredictRequest, + dict, + ], +) +def test_predict_rest(request_type): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = prediction_service.PredictResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.predict(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +def test_predict_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.predict in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.predict] = mock_rpc + + request = {} + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_predict_rest_required_fields(request_type=prediction_service.PredictRequest): + transport_class = transports.PredictionServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).predict._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).predict._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = prediction_service.PredictResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.predict(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_predict_rest_unset_required_fields(): + transport = transports.PredictionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.predict._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "instances", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_predict_rest_interceptors(null_interceptor): + transport = transports.PredictionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PredictionServiceRestInterceptor(), + ) + client = PredictionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_predict" + ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "pre_predict" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = prediction_service.PredictRequest.pb( + prediction_service.PredictRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = prediction_service.PredictResponse.to_json( + prediction_service.PredictResponse() + ) + + request = prediction_service.PredictRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = prediction_service.PredictResponse() + + client.predict( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_predict_rest_bad_request( + transport: str = "rest", request_type=prediction_service.PredictRequest +): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.predict(request) + + +def test_predict_rest_flattened(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = prediction_service.PredictResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.predict(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:predict" % client.transport._host, args[1] + ) + + +def test_predict_rest_flattened_error(transport: str = "rest"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +def test_predict_rest_error(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PredictionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PredictionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = PredictionServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PredictionServiceGrpcTransport, + ) + + +def test_prediction_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PredictionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_prediction_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PredictionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ("predict",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_prediction_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PredictionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_prediction_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PredictionServiceTransport() + adc.assert_called_once() + + +def test_prediction_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PredictionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_prediction_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com/" + api_audience_tests = [None, "https://language2.com/"] + api_audience_expect = [host, "https://language2.com/"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PredictionServiceGrpcTransport, grpc_helpers), + (transports.PredictionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_prediction_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_prediction_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PredictionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_prediction_service_host_no_port(transport_name): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_prediction_service_host_with_port(transport_name): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_prediction_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PredictionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PredictionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.predict._session + session2 = client2.transport.predict._session + assert session1 != session2 + + +def test_prediction_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_prediction_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PredictionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = PredictionServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = PredictionServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PredictionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PredictionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PredictionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PredictionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PredictionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PredictionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PredictionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = PredictionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PredictionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PredictionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PredictionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PredictionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PredictionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PredictionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From e727cc0e98e37d55882215182f86c2a7d23154ef Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 20:52:35 -0400 Subject: [PATCH 081/108] feat: [google-cloud-gdchardwaremanagement] add an order type field to distinguish a fulfillment request from a sales inquiry (#13090) BEGIN_COMMIT_OVERRIDE feat: add an order type field to distinguish a fulfillment request from a sales inquiry feat: add support to mark comments as read or unread feat: rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED docs: clarify how access_times are used END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
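A minimal usage sketch of the new surface described above, assuming the regenerated google-cloud-gdchardwaremanagement package: the client class, RPC name, request type, and enum values below are taken from the generated code in this diff, while the project, order, and comment identifiers are placeholders.

from google.cloud import gdchardwaremanagement_v1alpha


def sample_new_surface():
    # Placeholder resource names; real names follow
    # projects/{project}/locations/{location}/orders/{order}/comments/{comment}.
    comment_name = (
        "projects/my-project/locations/us-central1/orders/my-order/comments/my-comment"
    )
    order_name = "projects/my-project/locations/us-central1/orders/my-order"

    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()

    # Mark the comment as read; per the RecordActionOnComment docs in this diff,
    # READ stamps customer_viewed_time on the comment and UNREAD clears it.
    comment = client.record_action_on_comment(
        name=comment_name,
        action_type=gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest.ActionType.READ,
    )
    print(comment.customer_viewed_time)

    # The new SubmitOrderRequest.Type field distinguishes a fulfillment request
    # (INFO_COMPLETE) from the initial inquiry (INFO_PENDING, the default).
    request = gdchardwaremanagement_v1alpha.SubmitOrderRequest(
        name=order_name,
        type_=gdchardwaremanagement_v1alpha.SubmitOrderRequest.Type.INFO_COMPLETE,
    )
    client.submit_order(request=request)
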
feat: add support to mark comments as read or unread feat: rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED docs: clarify how access_times are used PiperOrigin-RevId: 675275984 Source-Link: https://github.com/googleapis/googleapis/commit/fde103ca32090688564bc86d8a430450d59dded7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a58cb7449fccaa7e9c76bd5a137e79aefa45ede7 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWdkY2hhcmR3YXJlbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiYTU4Y2I3NDQ5ZmNjYWE3ZTljNzZiZDVhMTM3ZTc5YWVmYTQ1ZWRlNyJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../cloud/gdchardwaremanagement/__init__.py | 6 + .../gdchardwaremanagement_v1alpha/__init__.py | 6 + .../gapic_metadata.json | 15 + .../gdc_hardware_management/async_client.py | 120 + .../gdc_hardware_management/client.py | 117 + .../transports/base.py | 14 + .../transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../transports/rest.py | 134 + .../types/__init__.py | 6 + .../types/resources.py | 145 +- .../types/service.py | 85 + ...nagement_record_action_on_comment_async.py | 53 + ...anagement_record_action_on_comment_sync.py | 53 + ...e.cloud.gdchardwaremanagement.v1alpha.json | 169 + ..._gdchardwaremanagement_v1alpha_keywords.py | 3 +- .../test_gdc_hardware_management.py | 8956 +++++++++-------- 17 files changed, 5851 insertions(+), 4098 deletions(-) create mode 100644 packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py create mode 100644 packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py index 8f1261e786ed..6804d0c2e133 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/__init__.py @@ -29,6 +29,7 @@ Comment, Contact, Dimensions, + Entity, Hardware, HardwareConfig, HardwareGroup, @@ -44,6 +45,7 @@ SkuConfig, SkuInstance, Subnet, + SubscriptionConfig, TimePeriod, Zone, ZoneNetworkConfig, @@ -84,6 +86,7 @@ ListZonesRequest, ListZonesResponse, OperationMetadata, + RecordActionOnCommentRequest, SignalZoneStateRequest, SubmitOrderRequest, UpdateHardwareGroupRequest, @@ -114,9 +117,11 @@ "SkuConfig", "SkuInstance", "Subnet", + "SubscriptionConfig", "TimePeriod", "Zone", "ZoneNetworkConfig", + "Entity", "PowerSupply", "CreateCommentRequest", "CreateHardwareGroupRequest", @@ -153,6 +158,7 @@ "ListZonesRequest", "ListZonesResponse", "OperationMetadata", + "RecordActionOnCommentRequest", "SignalZoneStateRequest", "SubmitOrderRequest", "UpdateHardwareGroupRequest", diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py index adfdd5d5be4b..4be645a5703c 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/__init__.py @@ -27,6 +27,7 @@ Comment, Contact, Dimensions, + Entity, Hardware, HardwareConfig, 
HardwareGroup, @@ -42,6 +43,7 @@ SkuConfig, SkuInstance, Subnet, + SubscriptionConfig, TimePeriod, Zone, ZoneNetworkConfig, @@ -82,6 +84,7 @@ ListZonesRequest, ListZonesResponse, OperationMetadata, + RecordActionOnCommentRequest, SignalZoneStateRequest, SubmitOrderRequest, UpdateHardwareGroupRequest, @@ -107,6 +110,7 @@ "DeleteOrderRequest", "DeleteZoneRequest", "Dimensions", + "Entity", "GDCHardwareManagementClient", "GetChangeLogEntryRequest", "GetCommentRequest", @@ -143,6 +147,7 @@ "OrganizationContact", "PowerSupply", "RackSpace", + "RecordActionOnCommentRequest", "SignalZoneStateRequest", "Site", "Sku", @@ -150,6 +155,7 @@ "SkuInstance", "SubmitOrderRequest", "Subnet", + "SubscriptionConfig", "TimePeriod", "UpdateHardwareGroupRequest", "UpdateHardwareRequest", diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json index b5713b6e1b21..3d9830bcaec9 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_metadata.json @@ -140,6 +140,11 @@ "list_zones" ] }, + "RecordActionOnComment": { + "methods": [ + "record_action_on_comment" + ] + }, "SignalZoneState": { "methods": [ "signal_zone_state" @@ -310,6 +315,11 @@ "list_zones" ] }, + "RecordActionOnComment": { + "methods": [ + "record_action_on_comment" + ] + }, "SignalZoneState": { "methods": [ "signal_zone_state" @@ -480,6 +490,11 @@ "list_zones" ] }, + "RecordActionOnComment": { + "methods": [ + "record_action_on_comment" + ] + }, "SignalZoneState": { "methods": [ "signal_zone_state" diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py index b0755c816a88..7c2f13b80903 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/async_client.py @@ -3237,6 +3237,126 @@ async def sample_create_comment(): # Done; return the response. return response + async def record_action_on_comment( + self, + request: Optional[Union[service.RecordActionOnCommentRequest, dict]] = None, + *, + name: Optional[str] = None, + action_type: Optional[service.RecordActionOnCommentRequest.ActionType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + async def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = await client.record_action_on_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest, dict]]): + The request object. A request to record an action on a + comment. + name (:class:`str`): + Required. The name of the comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + action_type (:class:`google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType`): + Required. The action type of the + recorded action. + + This corresponds to the ``action_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Comment: + A comment on an order. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, action_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RecordActionOnCommentRequest): + request = service.RecordActionOnCommentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if action_type is not None: + request.action_type = action_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.record_action_on_comment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_change_log_entries( self, request: Optional[Union[service.ListChangeLogEntriesRequest, dict]] = None, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py index 44e392dffb0f..fe8281c62c7d 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py @@ -3734,6 +3734,123 @@ def sample_create_comment(): # Done; return the response. return response + def record_action_on_comment( + self, + request: Optional[Union[service.RecordActionOnCommentRequest, dict]] = None, + *, + name: Optional[str] = None, + action_type: Optional[service.RecordActionOnCommentRequest.ActionType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gdchardwaremanagement_v1alpha + + def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = client.record_action_on_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest, dict]): + The request object. A request to record an action on a + comment. + name (str): + Required. The name of the comment. Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + action_type (google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType): + Required. The action type of the + recorded action. + + This corresponds to the ``action_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gdchardwaremanagement_v1alpha.types.Comment: + A comment on an order. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, action_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RecordActionOnCommentRequest): + request = service.RecordActionOnCommentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if action_type is not None: + request.action_type = action_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.record_action_on_comment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def list_change_log_entries( self, request: Optional[Union[service.ListChangeLogEntriesRequest, dict]] = None, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py index 2aacf3ed6d5a..1af0337b882d 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/base.py @@ -407,6 +407,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.record_action_on_comment: gapic_v1.method.wrap_method( + self.record_action_on_comment, + default_timeout=None, + client_info=client_info, + ), self.list_change_log_entries: gapic_v1.method.wrap_method( self.list_change_log_entries, default_retry=retries.Retry( @@ -753,6 +758,15 @@ def create_comment( ]: raise NotImplementedError() + @property + def record_action_on_comment( + self, + ) -> Callable[ + [service.RecordActionOnCommentRequest], + Union[resources.Comment, Awaitable[resources.Comment]], + ]: + raise NotImplementedError() + @property def list_change_log_entries( self, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py index 50edb419994a..a9126a268ef9 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc.py 
@@ -845,6 +845,37 @@ def create_comment( ) return self._stubs["create_comment"] + @property + def record_action_on_comment( + self, + ) -> Callable[[service.RecordActionOnCommentRequest], resources.Comment]: + r"""Return a callable for the record action on comment method over gRPC. + + Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + Returns: + Callable[[~.RecordActionOnCommentRequest], + ~.Comment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "record_action_on_comment" not in self._stubs: + self._stubs["record_action_on_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/RecordActionOnComment", + request_serializer=service.RecordActionOnCommentRequest.serialize, + response_deserializer=resources.Comment.deserialize, + ) + return self._stubs["record_action_on_comment"] + @property def list_change_log_entries( self, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py index 1dfcb6395532..12579c465cb7 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/grpc_asyncio.py @@ -868,6 +868,37 @@ def create_comment( ) return self._stubs["create_comment"] + @property + def record_action_on_comment( + self, + ) -> Callable[[service.RecordActionOnCommentRequest], Awaitable[resources.Comment]]: + r"""Return a callable for the record action on comment method over gRPC. + + Record Action on a Comment. If the Action specified + in the request is READ, the viewed time in the comment + is set to the time the request was received. If the + comment is already marked as read, subsequent calls will + be ignored. If the Action is UNREAD, the viewed time is + cleared from the comment. + + Returns: + Callable[[~.RecordActionOnCommentRequest], + Awaitable[~.Comment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "record_action_on_comment" not in self._stubs: + self._stubs["record_action_on_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement/RecordActionOnComment", + request_serializer=service.RecordActionOnCommentRequest.serialize, + response_deserializer=resources.Comment.deserialize, + ) + return self._stubs["record_action_on_comment"] + @property def list_change_log_entries( self, @@ -1411,6 +1442,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.record_action_on_comment: gapic_v1.method_async.wrap_method( + self.record_action_on_comment, + default_timeout=None, + client_info=client_info, + ), self.list_change_log_entries: gapic_v1.method_async.wrap_method( self.list_change_log_entries, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py index b95064ee417e..25c96778ddb7 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py @@ -280,6 +280,14 @@ def post_list_zones(self, response): logging.log(f"Received response: {response}") return response + def pre_record_action_on_comment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_record_action_on_comment(self, response): + logging.log(f"Received response: {response}") + return response + def pre_signal_zone_state(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -892,6 +900,29 @@ def post_list_zones( """ return response + def pre_record_action_on_comment( + self, + request: service.RecordActionOnCommentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RecordActionOnCommentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for record_action_on_comment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GDCHardwareManagement server. + """ + return request, metadata + + def post_record_action_on_comment( + self, response: resources.Comment + ) -> resources.Comment: + """Post-rpc interceptor for record_action_on_comment + + Override in a subclass to manipulate the response + after it is returned by the GDCHardwareManagement server but before + it is returned to user code. 
+ """ + return response + def pre_signal_zone_state( self, request: service.SignalZoneStateRequest, @@ -3615,6 +3646,101 @@ def __call__( resp = self._interceptor.post_list_zones(resp) return resp + class _RecordActionOnComment(GDCHardwareManagementRestStub): + def __hash__(self): + return hash("RecordActionOnComment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RecordActionOnCommentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Comment: + r"""Call the record action on comment method over HTTP. + + Args: + request (~.service.RecordActionOnCommentRequest): + The request object. A request to record an action on a + comment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Comment: + A comment on an order. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}:recordAction", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_record_action_on_comment( + request, metadata + ) + pb_request = service.RecordActionOnCommentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Comment() + pb_resp = resources.Comment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_record_action_on_comment(resp) + return resp + class _SignalZoneState(GDCHardwareManagementRestStub): def __hash__(self): return hash("SignalZoneState") @@ -4483,6 +4609,14 @@ def list_zones( # In C++ this would require a dynamic_cast return self._ListZones(self._session, self._host, self._interceptor) # type: ignore + @property + def record_action_on_comment( + self, + ) -> Callable[[service.RecordActionOnCommentRequest], resources.Comment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RecordActionOnComment(self._session, self._host, self._interceptor) # type: ignore + @property def signal_zone_state( self, diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py index 920359d3bd38..b800bd4e5181 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/__init__.py @@ -18,6 +18,7 @@ Comment, Contact, Dimensions, + Entity, Hardware, HardwareConfig, HardwareGroup, @@ -33,6 +34,7 @@ SkuConfig, SkuInstance, Subnet, + SubscriptionConfig, TimePeriod, Zone, ZoneNetworkConfig, @@ -73,6 +75,7 @@ ListZonesRequest, ListZonesResponse, OperationMetadata, + RecordActionOnCommentRequest, SignalZoneStateRequest, SubmitOrderRequest, UpdateHardwareGroupRequest, @@ -101,9 +104,11 @@ "SkuConfig", "SkuInstance", "Subnet", + "SubscriptionConfig", "TimePeriod", "Zone", "ZoneNetworkConfig", + "Entity", "PowerSupply", "CreateCommentRequest", "CreateHardwareGroupRequest", @@ -140,6 +145,7 @@ "ListZonesRequest", "ListZonesResponse", "OperationMetadata", + "RecordActionOnCommentRequest", "SignalZoneStateRequest", "SubmitOrderRequest", "UpdateHardwareGroupRequest", diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py index 54d902b9ae68..9cf90c8096c5 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/resources.py @@ -29,6 +29,7 @@ package="google.cloud.gdchardwaremanagement.v1alpha", manifest={ "PowerSupply", + "Entity", "Order", "Site", "HardwareGroup", @@ -50,6 +51,7 @@ "Dimensions", "RackSpace", "HardwareLocation", + "SubscriptionConfig", }, ) @@ -70,6 +72,25 @@ class PowerSupply(proto.Enum): POWER_SUPPLY_DC = 2 +class Entity(proto.Enum): + r"""Entity is used to denote an organization or party. + + Values: + ENTITY_UNSPECIFIED (0): + Entity is unspecified. + GOOGLE (1): + Google. + CUSTOMER (2): + Customer. + VENDOR (3): + Vendor. + """ + ENTITY_UNSPECIFIED = 0 + GOOGLE = 1 + CUSTOMER = 2 + VENDOR = 3 + + class Order(proto.Message): r"""An order for GDC hardware. @@ -140,6 +161,9 @@ class State(proto.Enum): has not been submitted yet. SUBMITTED (2): Order has been submitted to Google. + INFO_COMPLETE (12): + All information required from the customer + for fulfillment of the order is complete. ACCEPTED (3): Order has been accepted by Google. ADDITIONAL_INFO_NEEDED (4): @@ -167,6 +191,7 @@ class State(proto.Enum): STATE_UNSPECIFIED = 0 DRAFT = 1 SUBMITTED = 2 + INFO_COMPLETE = 12 ACCEPTED = 3 ADDITIONAL_INFO_NEEDED = 4 BUILDING = 5 @@ -297,17 +322,30 @@ class Site(proto.Message): Optional. The time periods when the site is accessible. If this field is empty, the site is accessible at all times. + + This field is used by Google to schedule the + initial installation as well as any later + hardware maintenance. You may update this at any + time. 
For example, if the initial installation + is requested during off-hours but maintenance + should be performed during regular business + hours, you should update the access times after + initial installation is complete. notes (str): Optional. Any additional notes for this Site. Please include information about: - - - security or access restrictions - - any regulations affecting the technicians - visiting the site - - any special process or approval required to - move the equipment - - whether a representative will be available - during site visits + - security or access restrictions + - any regulations affecting the technicians + visiting the site + - any special process or approval required to + move the equipment + - whether a representative will be available + during site visits + + customer_site_id (str): + Optional. Customer defined identifier for + this Site. This can be used to identify the site + in the customer's own systems. """ name: str = proto.Field( @@ -355,6 +393,10 @@ class Site(proto.Message): proto.STRING, number=27, ) + customer_site_id: str = proto.Field( + proto.STRING, + number=28, + ) class HardwareGroup(proto.Message): @@ -679,6 +721,14 @@ class Comment(proto.Message): text (str): Required. Text of this comment. The length of text must be <= 1000 characters. + customer_viewed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the first time this + comment was viewed by the customer. If the + comment wasn't viewed then this timestamp will + be unset. + author_entity (google.cloud.gdchardwaremanagement_v1alpha.types.Entity): + Output only. The entity the author belongs + to. """ name: str = proto.Field( @@ -703,6 +753,16 @@ class Comment(proto.Message): proto.STRING, number=5, ) + customer_viewed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + author_entity: "Entity" = proto.Field( + proto.ENUM, + number=7, + enum="Entity", + ) class ChangeLogEntry(proto.Message): @@ -881,6 +941,9 @@ class Zone(proto.Message): globally_unique_id (str): Output only. Globally unique identifier generated for this Edge Zone. + subscription_configs (MutableSequence[google.cloud.gdchardwaremanagement_v1alpha.types.SubscriptionConfig]): + Output only. Subscription configurations for + this zone. """ class State(proto.Enum): @@ -960,6 +1023,11 @@ class State(proto.Enum): proto.STRING, number=12, ) + subscription_configs: MutableSequence["SubscriptionConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="SubscriptionConfig", + ) class OrganizationContact(proto.Message): @@ -1502,4 +1570,65 @@ class HardwareLocation(proto.Message): ) +class SubscriptionConfig(proto.Message): + r"""A message to store a subscription configuration. + + Attributes: + subscription_id (str): + Output only. The unique identifier of the + subscription. + billing_id (str): + Output only. The Google Cloud Billing ID that + the subscription is created under. + state (google.cloud.gdchardwaremanagement_v1alpha.types.SubscriptionConfig.SubscriptionState): + Output only. The current state of the + subscription. + """ + + class SubscriptionState(proto.Enum): + r"""Enum to represent the state of the subscription. + + Values: + SUBSCRIPTION_STATE_UNSPECIFIED (0): + State is unspecified. + ACTIVE (1): + Active state means that the subscription has + been created successfully and billing is + happening. 
+ INACTIVE (2): + Inactive means that the subscription has been + created successfully, but billing has not + started yet. + ERROR (3): + The subscription is in an erroneous state. + FAILED_TO_RETRIEVE (4): + The subscription state failed to be + retrieved. This may be a transient issue. The + user should retry the request. + COMPLETED (5): + The subscription has been completed, because + it has reached the end date. + """ + SUBSCRIPTION_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + ERROR = 3 + FAILED_TO_RETRIEVE = 4 + COMPLETED = 5 + + subscription_id: str = proto.Field( + proto.STRING, + number=1, + ) + billing_id: str = proto.Field( + proto.STRING, + number=2, + ) + state: SubscriptionState = proto.Field( + proto.ENUM, + number=3, + enum=SubscriptionState, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py index 9821128023c7..88c0224f1a48 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/types/service.py @@ -54,6 +54,7 @@ "ListCommentsResponse", "GetCommentRequest", "CreateCommentRequest", + "RecordActionOnCommentRequest", "ListChangeLogEntriesRequest", "ListChangeLogEntriesResponse", "GetChangeLogEntryRequest", @@ -280,8 +281,44 @@ class SubmitOrderRequest(proto.Message): request_id (str): Optional. An optional unique identifier for this request. See `AIP-155 `__. + type_ (google.cloud.gdchardwaremanagement_v1alpha.types.SubmitOrderRequest.Type): + Optional. Type of this request. If unset, the request type + is assumed to be ``INFO_PENDING``. """ + class Type(proto.Enum): + r"""Valid types of submit order request. + + Values: + TYPE_UNSPECIFIED (0): + Request type is unspecified. This should not + be used. + INFO_PENDING (1): + Use this request type to submit your order + and initiate conversation with Google. After + this submission, you will not be able to modify + the number or SKU of your ordered hardware. + Please note that this order will not be ready + for fulfillment yet until you provide more + information, such as zone network configuration, + hardware physical and installation information, + etc. + If you are submitting an order for a SKU type of + RACK, please use this request type, as + additional information will be required outside + of the API. + INFO_COMPLETE (2): + Use this request type if and when you are ready to submit + your order for fulfillment. In addition to the information + required for ``INFO_PENDING``, the order must contain all + required information, such as zone network configuration, + hardware physical and installation information, etc. Further + changes to any order information will no longer be allowed. + """ + TYPE_UNSPECIFIED = 0 + INFO_PENDING = 1 + INFO_COMPLETE = 2 + name: str = proto.Field( proto.STRING, number=1, @@ -290,6 +327,11 @@ class SubmitOrderRequest(proto.Message): proto.STRING, number=2, ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) class ListSitesRequest(proto.Message): @@ -963,6 +1005,44 @@ class CreateCommentRequest(proto.Message): ) +class RecordActionOnCommentRequest(proto.Message): + r"""A request to record an action on a comment. + + Attributes: + name (str): + Required. The name of the comment. 
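# Illustrative sketch, not part of the generated patch: how a caller might use the new
# SubmitOrderRequest.Type field described above. The project, location, and order names
# are hypothetical placeholders, and the long-running-operation handling is an assumption
# based on the other generated submit_order samples rather than a guaranteed return type.
from google.cloud import gdchardwaremanagement_v1alpha


def sample_submit_order_info_complete():
    # Create a client
    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()

    # Mark the order as INFO_COMPLETE once all fulfillment information is in place;
    # further changes to the order information will no longer be allowed.
    request = gdchardwaremanagement_v1alpha.SubmitOrderRequest(
        name="projects/my-project/locations/us-central1/orders/my-order",  # hypothetical
        type_=gdchardwaremanagement_v1alpha.SubmitOrderRequest.Type.INFO_COMPLETE,
    )

    # If the service exposes SubmitOrder as a long-running operation (as the existing
    # generated samples suggest), the returned object supports .result() to block
    # until the submission completes.
    response = client.submit_order(request=request)
    print(response)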
Format: + ``projects/{project}/locations/{location}/orders/{order}/comments/{comment}`` + action_type (google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType): + Required. The action type of the recorded + action. + """ + + class ActionType(proto.Enum): + r"""Valid action types of Comment. + + Values: + ACTION_TYPE_UNSPECIFIED (0): + Action is unspecified. + READ (1): + Mark comment as read. + UNREAD (2): + Mark comment as unread. + """ + ACTION_TYPE_UNSPECIFIED = 0 + READ = 1 + UNREAD = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + action_type: ActionType = proto.Field( + proto.ENUM, + number=2, + enum=ActionType, + ) + + class ListChangeLogEntriesRequest(proto.Message): r"""A request to list change log entries. @@ -1354,12 +1434,17 @@ class StateSignal(proto.Enum): Values: STATE_SIGNAL_UNSPECIFIED (0): State signal of the zone is unspecified. + FACTORY_TURNUP_CHECKS_PASSED (1): + The Zone is ready for site turnup. READY_FOR_SITE_TURNUP (1): The Zone is ready for site turnup. + Deprecated, but not deleted. FACTORY_TURNUP_CHECKS_FAILED (2): The Zone failed in factory turnup checks. """ + _pb_options = {"allow_alias": True} STATE_SIGNAL_UNSPECIFIED = 0 + FACTORY_TURNUP_CHECKS_PASSED = 1 READY_FOR_SITE_TURNUP = 1 FACTORY_TURNUP_CHECKS_FAILED = 2 diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py new file mode 100644 index 000000000000..64fa35ff6d8a --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecordActionOnComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +async def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = await client.record_action_on_comment(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_async] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py new file mode 100644 index 000000000000..6658459e3cda --- /dev/null +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecordActionOnComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gdchardwaremanagement + + +# [START gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
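# Illustrative sketch, not part of the generated patch: reading the new output-only
# subscription_configs field on Zone introduced earlier in this change. The zone name
# is a hypothetical placeholder.
from google.cloud import gdchardwaremanagement_v1alpha


def sample_print_zone_subscription_configs():
    # Create a client
    client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient()

    # get_zone accepts the flattened "name" argument per the keyword map in
    # fixup_gdchardwaremanagement_v1alpha_keywords.py.
    zone = client.get_zone(
        name="projects/my-project/locations/us-central1/zones/my-zone",  # hypothetical
    )

    # Each SubscriptionConfig carries the subscription id, the billing account it was
    # created under, and a SubscriptionState such as ACTIVE or INACTIVE.
    for config in zone.subscription_configs:
        print(config.subscription_id, config.billing_id, config.state)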
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gdchardwaremanagement_v1alpha + + +def sample_record_action_on_comment(): + # Create a client + client = gdchardwaremanagement_v1alpha.GDCHardwareManagementClient() + + # Initialize request argument(s) + request = gdchardwaremanagement_v1alpha.RecordActionOnCommentRequest( + name="name_value", + action_type="UNREAD", + ) + + # Make the request + response = client.record_action_on_comment(request=request) + + # Handle the response + print(response) + +# [END gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_sync] diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json index 9716952b0d31..588b33c7fb6e 100644 --- a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json @@ -4293,6 +4293,175 @@ ], "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_list_zones_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient", + "shortName": "GDCHardwareManagementAsyncClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementAsyncClient.record_action_on_comment", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.RecordActionOnComment", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "RecordActionOnComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "action_type", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment", + "shortName": "record_action_on_comment" + }, + "description": "Sample for RecordActionOnComment", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient", + "shortName": "GDCHardwareManagementClient" + }, + "fullName": "google.cloud.gdchardwaremanagement_v1alpha.GDCHardwareManagementClient.record_action_on_comment", + "method": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement.RecordActionOnComment", + "service": { + "fullName": "google.cloud.gdchardwaremanagement.v1alpha.GDCHardwareManagement", + "shortName": "GDCHardwareManagement" + }, + "shortName": "RecordActionOnComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "action_type", + "type": "google.cloud.gdchardwaremanagement_v1alpha.types.RecordActionOnCommentRequest.ActionType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gdchardwaremanagement_v1alpha.types.Comment", + "shortName": "record_action_on_comment" + }, + "description": "Sample for RecordActionOnComment", + "file": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gdchardwaremanagement_v1alpha_generated_GDCHardwareManagement_RecordActionOnComment_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gdchardwaremanagement_v1alpha_generated_gdc_hardware_management_record_action_on_comment_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py b/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py index 0a8496e2ccc3..c10e8cef3d54 100644 --- a/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py +++ b/packages/google-cloud-gdchardwaremanagement/scripts/fixup_gdchardwaremanagement_v1alpha_keywords.py @@ -65,8 +65,9 @@ class gdchardwaremanagementCallTransformer(cst.CSTTransformer): 'list_sites': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_skus': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'record_action_on_comment': ('name', 'action_type', ), 'signal_zone_state': ('name', 'state_signal', 'request_id', ), - 'submit_order': ('name', 'request_id', ), + 'submit_order': ('name', 'request_id', 'type_', ), 'update_hardware': ('update_mask', 'hardware', 'request_id', ), 'update_hardware_group': ('update_mask', 'hardware_group', 'request_id', ), 'update_order': ('update_mask', 'order', 'request_id', ), diff --git 
a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py index 72d9a360aa19..dc7856c427c9 100644 --- a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py @@ -4275,6 +4275,7 @@ def test_get_site(request_type, transport: str = "grpc"): description="description_value", google_maps_pin_uri="google_maps_pin_uri_value", notes="notes_value", + customer_site_id="customer_site_id_value", ) response = client.get_site(request) @@ -4291,6 +4292,7 @@ def test_get_site(request_type, transport: str = "grpc"): assert response.description == "description_value" assert response.google_maps_pin_uri == "google_maps_pin_uri_value" assert response.notes == "notes_value" + assert response.customer_site_id == "customer_site_id_value" def test_get_site_empty_call(): @@ -4394,6 +4396,7 @@ async def test_get_site_empty_call_async(): description="description_value", google_maps_pin_uri="google_maps_pin_uri_value", notes="notes_value", + customer_site_id="customer_site_id_value", ) ) response = await client.get_site() @@ -4465,6 +4468,7 @@ async def test_get_site_async( description="description_value", google_maps_pin_uri="google_maps_pin_uri_value", notes="notes_value", + customer_site_id="customer_site_id_value", ) ) response = await client.get_site(request) @@ -4482,6 +4486,7 @@ async def test_get_site_async( assert response.description == "description_value" assert response.google_maps_pin_uri == "google_maps_pin_uri_value" assert response.notes == "notes_value" + assert response.customer_site_id == "customer_site_id_value" @pytest.mark.asyncio @@ -10302,6 +10307,7 @@ def test_get_comment(request_type, transport: str = "grpc"): name="name_value", author="author_value", text="text_value", + author_entity=resources.Entity.GOOGLE, ) response = client.get_comment(request) @@ -10316,6 +10322,7 @@ def test_get_comment(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.author == "author_value" assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE def test_get_comment_empty_call(): @@ -10417,6 +10424,7 @@ async def test_get_comment_empty_call_async(): name="name_value", author="author_value", text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) response = await client.get_comment() @@ -10488,6 +10496,7 @@ async def test_get_comment_async( name="name_value", author="author_value", text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) response = await client.get_comment(request) @@ -10503,6 +10512,7 @@ async def test_get_comment_async( assert response.name == "name_value" assert response.author == "author_value" assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE @pytest.mark.asyncio @@ -11043,11 +11053,11 @@ async def test_create_comment_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.ListChangeLogEntriesRequest, + service.RecordActionOnCommentRequest, dict, ], ) -def test_list_change_log_entries(request_type, transport: str = "grpc"): +def test_record_action_on_comment(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11059,28 +11069,32 @@ def test_list_change_log_entries(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListChangeLogEntriesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) - response = client.list_change_log_entries(request) + response = client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListChangeLogEntriesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE -def test_list_change_log_entries_empty_call(): +def test_record_action_on_comment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -11090,18 +11104,18 @@ def test_list_change_log_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_change_log_entries() + client.record_action_on_comment() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListChangeLogEntriesRequest() + assert args[0] == service.RecordActionOnCommentRequest() -def test_list_change_log_entries_non_empty_request_with_auto_populated_field(): +def test_record_action_on_comment_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -11112,32 +11126,26 @@ def test_list_change_log_entries_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListChangeLogEntriesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = service.RecordActionOnCommentRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_change_log_entries(request=request) + client.record_action_on_comment(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListChangeLogEntriesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == service.RecordActionOnCommentRequest( + name="name_value", ) -def test_list_change_log_entries_use_cached_wrapped_rpc(): +def test_record_action_on_comment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11152,7 +11160,7 @@ def test_list_change_log_entries_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_change_log_entries + client._transport.record_action_on_comment in client._transport._wrapped_methods ) @@ -11162,15 +11170,15 @@ def test_list_change_log_entries_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_change_log_entries + client._transport.record_action_on_comment ] = mock_rpc request = {} - client.list_change_log_entries(request) + client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_change_log_entries(request) + client.record_action_on_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11178,7 +11186,7 @@ def test_list_change_log_entries_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_change_log_entries_empty_call_async(): +async def test_record_action_on_comment_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -11188,23 +11196,25 @@ async def test_list_change_log_entries_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) - response = await client.list_change_log_entries() + response = await client.record_action_on_comment() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListChangeLogEntriesRequest() + assert args[0] == service.RecordActionOnCommentRequest() @pytest.mark.asyncio -async def test_list_change_log_entries_async_use_cached_wrapped_rpc( +async def test_record_action_on_comment_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11221,7 +11231,7 @@ async def test_list_change_log_entries_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_change_log_entries + client._client._transport.record_action_on_comment in client._client._transport._wrapped_methods ) @@ -11229,16 +11239,16 @@ async def test_list_change_log_entries_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_change_log_entries + client._client._transport.record_action_on_comment ] = mock_rpc request = {} - await client.list_change_log_entries(request) + await client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_change_log_entries(request) + await client.record_action_on_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11246,8 +11256,8 @@ async def test_list_change_log_entries_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_change_log_entries_async( - transport: str = "grpc_asyncio", request_type=service.ListChangeLogEntriesRequest +async def test_record_action_on_comment_async( + transport: str = "grpc_asyncio", request_type=service.RecordActionOnCommentRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11260,51 +11270,55 @@ async def test_list_change_log_entries_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) ) - response = await client.list_change_log_entries(request) + response = await client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListChangeLogEntriesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE @pytest.mark.asyncio -async def test_list_change_log_entries_async_from_dict(): - await test_list_change_log_entries_async(request_type=dict) +async def test_record_action_on_comment_async_from_dict(): + await test_record_action_on_comment_async(request_type=dict) -def test_list_change_log_entries_field_headers(): +def test_record_action_on_comment_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: - call.return_value = service.ListChangeLogEntriesResponse() - client.list_change_log_entries(request) + call.return_value = resources.Comment() + client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11315,30 +11329,28 @@ def test_list_change_log_entries_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_change_log_entries_field_headers_async(): +async def test_record_action_on_comment_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListChangeLogEntriesRequest() + request = service.RecordActionOnCommentRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse() - ) - await client.list_change_log_entries(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Comment()) + await client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11349,37 +11361,41 @@ async def test_list_change_log_entries_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_change_log_entries_flattened(): +def test_record_action_on_comment_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListChangeLogEntriesResponse() + call.return_value = resources.Comment() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_change_log_entries( - parent="parent_value", + client.record_action_on_comment( + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].action_type + mock_val = service.RecordActionOnCommentRequest.ActionType.READ assert arg == mock_val -def test_list_change_log_entries_flattened_error(): +def test_record_action_on_comment_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11387,45 +11403,48 @@ def test_list_change_log_entries_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_change_log_entries( - service.ListChangeLogEntriesRequest(), - parent="parent_value", + client.record_action_on_comment( + service.RecordActionOnCommentRequest(), + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) @pytest.mark.asyncio -async def test_list_change_log_entries_flattened_async(): +async def test_record_action_on_comment_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_change_log_entries), "__call__" + type(client.transport.record_action_on_comment), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListChangeLogEntriesResponse() + call.return_value = resources.Comment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListChangeLogEntriesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Comment()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_change_log_entries( - parent="parent_value", + response = await client.record_action_on_comment( + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].action_type + mock_val = service.RecordActionOnCommentRequest.ActionType.READ assert arg == mock_val @pytest.mark.asyncio -async def test_list_change_log_entries_flattened_error_async(): +async def test_record_action_on_comment_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11433,276 +11452,75 @@ async def test_list_change_log_entries_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_change_log_entries( - service.ListChangeLogEntriesRequest(), - parent="parent_value", + await client.record_action_on_comment( + service.RecordActionOnCommentRequest(), + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) -def test_list_change_log_entries_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListChangeLogEntriesRequest, + dict, + ], +) +def test_list_change_log_entries(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_change_log_entries), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_change_log_entries(request={}, retry=retry, timeout=timeout) + response = client.list_change_log_entries(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListChangeLogEntriesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.ChangeLogEntry) for i in results) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChangeLogEntriesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_change_log_entries_pages(transport_name: str = "grpc"): +def test_list_change_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_change_log_entries), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_change_log_entries(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_change_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListChangeLogEntriesRequest() -@pytest.mark.asyncio -async def test_list_change_log_entries_async_pager(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_change_log_entries), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_change_log_entries( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.ChangeLogEntry) for i in responses) - - -@pytest.mark.asyncio -async def test_list_change_log_entries_async_pages(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_change_log_entries), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - next_page_token="abc", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[], - next_page_token="def", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - ], - next_page_token="ghi", - ), - service.ListChangeLogEntriesResponse( - change_log_entries=[ - resources.ChangeLogEntry(), - resources.ChangeLogEntry(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_change_log_entries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetChangeLogEntryRequest, - dict, - ], -) -def test_get_change_log_entry(request_type, transport: str = "grpc"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.ChangeLogEntry( - name="name_value", - log="log_value", - ) - response = client.get_change_log_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetChangeLogEntryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.ChangeLogEntry) - assert response.name == "name_value" - assert response.log == "log_value" - - -def test_get_change_log_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_change_log_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetChangeLogEntryRequest() - - -def test_get_change_log_entry_non_empty_request_with_auto_populated_field(): +def test_list_change_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = GDCHardwareManagementClient( @@ -11713,26 +11531,32 @@ def test_get_change_log_entry_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetChangeLogEntryRequest( - name="name_value", + request = service.ListChangeLogEntriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_change_log_entry(request=request) + client.list_change_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetChangeLogEntryRequest( - name="name_value", + assert args[0] == service.ListChangeLogEntriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_change_log_entry_use_cached_wrapped_rpc(): +def test_list_change_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11747,7 +11571,8 @@ def test_get_change_log_entry_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_change_log_entry in client._transport._wrapped_methods + client._transport.list_change_log_entries + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -11756,15 +11581,15 @@ def test_get_change_log_entry_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_change_log_entry + client._transport.list_change_log_entries ] = mock_rpc request = {} - client.get_change_log_entry(request) + client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_change_log_entry(request) + client.list_change_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11772,7 +11597,7 @@ def test_get_change_log_entry_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_change_log_entry_empty_call_async(): +async def test_list_change_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -11782,23 +11607,23 @@ async def test_get_change_log_entry_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry( - name="name_value", - log="log_value", + service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_change_log_entry() + response = await client.list_change_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetChangeLogEntryRequest() + assert args[0] == service.ListChangeLogEntriesRequest() @pytest.mark.asyncio -async def test_get_change_log_entry_async_use_cached_wrapped_rpc( +async def test_list_change_log_entries_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11815,7 +11640,7 @@ async def test_get_change_log_entry_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_change_log_entry + client._client._transport.list_change_log_entries in client._client._transport._wrapped_methods ) @@ -11823,16 +11648,16 @@ async def test_get_change_log_entry_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_change_log_entry + client._client._transport.list_change_log_entries ] = mock_rpc request = {} - await client.get_change_log_entry(request) + await client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_change_log_entry(request) + await client.list_change_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11840,8 +11665,8 @@ async def test_get_change_log_entry_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_change_log_entry_async( - transport: str = "grpc_asyncio", request_type=service.GetChangeLogEntryRequest +async def test_list_change_log_entries_async( + transport: str = "grpc_asyncio", request_type=service.ListChangeLogEntriesRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11854,51 +11679,51 @@ async def test_get_change_log_entry_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry( - name="name_value", - log="log_value", + service.ListChangeLogEntriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_change_log_entry(request) + response = await client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetChangeLogEntryRequest() + request = service.ListChangeLogEntriesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ChangeLogEntry) - assert response.name == "name_value" - assert response.log == "log_value" + assert isinstance(response, pagers.ListChangeLogEntriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_change_log_entry_async_from_dict(): - await test_get_change_log_entry_async(request_type=dict) +async def test_list_change_log_entries_async_from_dict(): + await test_list_change_log_entries_async(request_type=dict) -def test_get_change_log_entry_field_headers(): +def test_list_change_log_entries_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetChangeLogEntryRequest() + request = service.ListChangeLogEntriesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: - call.return_value = resources.ChangeLogEntry() - client.get_change_log_entry(request) + call.return_value = service.ListChangeLogEntriesResponse() + client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11909,30 +11734,30 @@ def test_get_change_log_entry_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_change_log_entry_field_headers_async(): +async def test_list_change_log_entries_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetChangeLogEntryRequest() + request = service.ListChangeLogEntriesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry() + service.ListChangeLogEntriesResponse() ) - await client.get_change_log_entry(request) + await client.list_change_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11943,37 +11768,37 @@ async def test_get_change_log_entry_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_change_log_entry_flattened(): +def test_list_change_log_entries_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.ChangeLogEntry() + call.return_value = service.ListChangeLogEntriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_change_log_entry( - name="name_value", + client.list_change_log_entries( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_change_log_entry_flattened_error(): +def test_list_change_log_entries_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11981,45 +11806,45 @@ def test_get_change_log_entry_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_change_log_entry( - service.GetChangeLogEntryRequest(), - name="name_value", + client.list_change_log_entries( + service.ListChangeLogEntriesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_change_log_entry_flattened_async(): +async def test_list_change_log_entries_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_change_log_entry), "__call__" + type(client.transport.list_change_log_entries), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.ChangeLogEntry() + call.return_value = service.ListChangeLogEntriesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ChangeLogEntry() + service.ListChangeLogEntriesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_change_log_entry( - name="name_value", + response = await client.list_change_log_entries( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_change_log_entry_flattened_error_async(): +async def test_list_change_log_entries_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12027,132 +11852,338 @@ async def test_get_change_log_entry_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_change_log_entry( - service.GetChangeLogEntryRequest(), - name="name_value", + await client.list_change_log_entries( + service.ListChangeLogEntriesRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.ListSkusRequest, - dict, - ], -) -def test_list_skus(request_type, transport: str = "grpc"): +def test_list_change_log_entries_pager(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListSkusResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, ) - response = client.list_skus(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListSkusRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_change_log_entries(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSkusPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ChangeLogEntry) for i in results) -def test_list_skus_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_change_log_entries_pages(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object( + type(client.transport.list_change_log_entries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, ) - client.list_skus() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSkusRequest() + pages = list(client.list_change_log_entries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_skus_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( +@pytest.mark.asyncio +async def test_list_change_log_entries_async_pager(): + client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListSkusRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object( + type(client.transport.list_change_log_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, ) - client.list_skus(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSkusRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + async_pager = await client.list_change_log_entries( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, resources.ChangeLogEntry) for i in responses) -def test_list_skus_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + +@pytest.mark.asyncio +async def test_list_change_log_entries_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_change_log_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + next_page_token="abc", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[], + next_page_token="def", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + ], + next_page_token="ghi", + ), + service.ListChangeLogEntriesResponse( + change_log_entries=[ + resources.ChangeLogEntry(), + resources.ChangeLogEntry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_change_log_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetChangeLogEntryRequest, + dict, + ], +) +def test_get_change_log_entry(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ChangeLogEntry( + name="name_value", + log="log_value", + ) + response = client.get_change_log_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetChangeLogEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ChangeLogEntry) + assert response.name == "name_value" + assert response.log == "log_value" + + +def test_get_change_log_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_change_log_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetChangeLogEntryRequest() + + +def test_get_change_log_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetChangeLogEntryRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_change_log_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetChangeLogEntryRequest( + name="name_value", + ) + + +def test_get_change_log_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_skus in client._transport._wrapped_methods + assert ( + client._transport.get_change_log_entry in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_skus] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_change_log_entry + ] = mock_rpc request = {} - client.list_skus(request) + client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_skus(request) + client.get_change_log_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12160,7 +12191,7 @@ def test_list_skus_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_skus_empty_call_async(): +async def test_get_change_log_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -12169,22 +12200,26 @@ async def test_list_skus_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.ChangeLogEntry( + name="name_value", + log="log_value", ) ) - response = await client.list_skus() + response = await client.get_change_log_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSkusRequest() + assert args[0] == service.GetChangeLogEntryRequest() @pytest.mark.asyncio -async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_change_log_entry_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12199,7 +12234,7 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.list_skus + client._client._transport.get_change_log_entry in client._client._transport._wrapped_methods ) @@ -12207,16 +12242,16 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_skus + client._client._transport.get_change_log_entry ] = mock_rpc request = {} - await client.list_skus(request) + await client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_skus(request) + await client.get_change_log_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12224,8 +12259,8 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_list_skus_async( - transport: str = "grpc_asyncio", request_type=service.ListSkusRequest +async def test_get_change_log_entry_async( + transport: str = "grpc_asyncio", request_type=service.GetChangeLogEntryRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12237,48 +12272,52 @@ async def test_list_skus_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.ChangeLogEntry( + name="name_value", + log="log_value", ) ) - response = await client.list_skus(request) + response = await client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListSkusRequest() + request = service.GetChangeLogEntryRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSkusAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.ChangeLogEntry) + assert response.name == "name_value" + assert response.log == "log_value" @pytest.mark.asyncio -async def test_list_skus_async_from_dict(): - await test_list_skus_async(request_type=dict) +async def test_get_change_log_entry_async_from_dict(): + await test_get_change_log_entry_async(request_type=dict) -def test_list_skus_field_headers(): +def test_get_change_log_entry_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListSkusRequest() + request = service.GetChangeLogEntryRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - call.return_value = service.ListSkusResponse() - client.list_skus(request) + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + call.return_value = resources.ChangeLogEntry() + client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -12289,28 +12328,30 @@ def test_list_skus_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_skus_field_headers_async(): +async def test_get_change_log_entry_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListSkusRequest() + request = service.GetChangeLogEntryRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse() + resources.ChangeLogEntry() ) - await client.list_skus(request) + await client.get_change_log_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12321,35 +12362,37 @@ async def test_list_skus_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_skus_flattened(): +def test_get_change_log_entry_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListSkusResponse() + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ChangeLogEntry() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_skus( - parent="parent_value", + client.get_change_log_entry( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_skus_flattened_error(): +def test_get_change_log_entry_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12357,43 +12400,45 @@ def test_list_skus_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_skus( - service.ListSkusRequest(), - parent="parent_value", + client.get_change_log_entry( + service.GetChangeLogEntryRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_skus_flattened_async(): +async def test_get_change_log_entry_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + with mock.patch.object( + type(client.transport.get_change_log_entry), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListSkusResponse() + call.return_value = resources.ChangeLogEntry() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSkusResponse() + resources.ChangeLogEntry() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_skus( - parent="parent_value", + response = await client.get_change_log_entry( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_skus_flattened_error_async(): +async def test_get_change_log_entry_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12401,330 +12446,132 @@ async def test_list_skus_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_skus( - service.ListSkusRequest(), - parent="parent_value", + await client.get_change_log_entry( + service.GetChangeLogEntryRequest(), + name="name_value", ) -def test_list_skus_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListSkusRequest, + dict, + ], +) +def test_list_skus(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_skus(request={}, retry=retry, timeout=timeout) + response = client.list_skus(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListSkusRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Sku) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSkusPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_skus_pages(transport_name: str = "grpc"): +def test_list_skus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_skus), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_skus(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_skus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSkusRequest() -@pytest.mark.asyncio -async def test_list_skus_async_pager(): - client = GDCHardwareManagementAsyncClient( +def test_list_skus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListSkusRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - async_pager = await client.list_skus( - request={}, + client.list_skus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSkusRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Sku) for i in responses) +def test_list_skus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -@pytest.mark.asyncio -async def test_list_skus_async_pages(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - resources.Sku(), - ], - next_page_token="abc", - ), - service.ListSkusResponse( - skus=[], - next_page_token="def", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - ], - next_page_token="ghi", - ), - service.ListSkusResponse( - skus=[ - resources.Sku(), - resources.Sku(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_skus(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetSkuRequest, - dict, - ], -) -def test_get_sku(request_type, transport: str = "grpc"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Sku( - name="name_value", - display_name="display_name_value", - description="description_value", - revision_id="revision_id_value", - is_active=True, - type_=resources.Sku.Type.RACK, - vcpu_count=1094, - ) - response = client.get_sku(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetSkuRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Sku) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.revision_id == "revision_id_value" - assert response.is_active is True - assert response.type_ == resources.Sku.Type.RACK - assert response.vcpu_count == 1094 - - -def test_get_sku_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_sku() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetSkuRequest() - - -def test_get_sku_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetSkuRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_sku(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetSkuRequest( - name="name_value", - ) - - -def test_get_sku_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_sku in client._transport._wrapped_methods + assert client._transport.list_skus in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_sku] = mock_rpc + client._transport._wrapped_methods[client._transport.list_skus] = mock_rpc request = {} - client.get_sku(request) + client.list_skus(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_sku(request) + client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12732,7 +12579,7 @@ def test_get_sku_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_sku_empty_call_async(): +async def test_list_skus_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -12741,27 +12588,22 @@ async def test_get_sku_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Sku( - name="name_value", - display_name="display_name_value", - description="description_value", - revision_id="revision_id_value", - is_active=True, - type_=resources.Sku.Type.RACK, - vcpu_count=1094, + service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_sku() + response = await client.list_skus() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetSkuRequest() + assert args[0] == service.ListSkusRequest() @pytest.mark.asyncio -async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12776,7 +12618,7 @@ async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_async # Ensure method has been cached assert ( - client._client._transport.get_sku + client._client._transport.list_skus in client._client._transport._wrapped_methods ) @@ -12784,16 +12626,16 @@ async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_async mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_sku + client._client._transport.list_skus ] = mock_rpc request = {} - await client.get_sku(request) + await client.list_skus(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_sku(request) + await client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12801,8 +12643,8 @@ async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_async @pytest.mark.asyncio -async def test_get_sku_async( - transport: str = "grpc_asyncio", request_type=service.GetSkuRequest +async def test_list_skus_async( + transport: str = "grpc_asyncio", request_type=service.ListSkusRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12814,58 +12656,48 @@ async def test_get_sku_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Sku( - name="name_value", - display_name="display_name_value", - description="description_value", - revision_id="revision_id_value", - is_active=True, - type_=resources.Sku.Type.RACK, - vcpu_count=1094, + service.ListSkusResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_sku(request) + response = await client.list_skus(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetSkuRequest() + request = service.ListSkusRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Sku) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.revision_id == "revision_id_value" - assert response.is_active is True - assert response.type_ == resources.Sku.Type.RACK - assert response.vcpu_count == 1094 + assert isinstance(response, pagers.ListSkusAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_sku_async_from_dict(): - await test_get_sku_async(request_type=dict) +async def test_list_skus_async_from_dict(): + await test_list_skus_async(request_type=dict) -def test_get_sku_field_headers(): +def test_list_skus_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetSkuRequest() + request = service.ListSkusRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value = resources.Sku() - client.get_sku(request) + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value = service.ListSkusResponse() + client.list_skus(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12876,26 +12708,28 @@ def test_get_sku_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_sku_field_headers_async(): +async def test_list_skus_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetSkuRequest() + request = service.ListSkusRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_sku), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) - await client.get_sku(request) + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse() + ) + await client.list_skus(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12906,35 +12740,35 @@ async def test_get_sku_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_sku_flattened(): +def test_list_skus_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Sku() + call.return_value = service.ListSkusResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_sku( - name="name_value", + client.list_skus( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_sku_flattened_error(): +def test_list_skus_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12942,41 +12776,43 @@ def test_get_sku_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_sku( - service.GetSkuRequest(), - name="name_value", + client.list_skus( + service.ListSkusRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_sku_flattened_async(): +async def test_list_skus_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Sku() + call.return_value = service.ListSkusResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSkusResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_sku( - name="name_value", + response = await client.list_skus( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_sku_flattened_error_async(): +async def test_list_skus_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12984,164 +12820,367 @@ async def test_get_sku_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_sku( - service.GetSkuRequest(), - name="name_value", + await client.list_skus( + service.ListSkusRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.ListZonesRequest, - dict, - ], -) -def test_list_zones(request_type, transport: str = "grpc"): +def test_list_skus_pager(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, ) - response = client.list_zones(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListZonesRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_skus(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZonesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Sku) for i in results) -def test_list_zones_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. + +def test_list_skus_pages(transport_name: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_skus), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, ) - client.list_zones() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest() + pages = list(client.list_skus(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_zones_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( +@pytest.mark.asyncio +async def test_list_skus_async_pager(): + client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListZonesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_zones(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_zones_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + with mock.patch.object( + type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zones in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + async_pager = await client.list_skus( + request={}, ) - client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc - request = {} - client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_zones(request) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + assert len(responses) == 6 + assert all(isinstance(i, resources.Sku) for i in responses) @pytest.mark.asyncio -async def test_list_zones_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +async def test_list_skus_async_pages(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Designate an appropriate return value for the call. + with mock.patch.object( + type(client.transport.list_skus), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + resources.Sku(), + ], + next_page_token="abc", + ), + service.ListSkusResponse( + skus=[], + next_page_token="def", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + ], + next_page_token="ghi", + ), + service.ListSkusResponse( + skus=[ + resources.Sku(), + resources.Sku(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_skus(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSkuRequest, + dict, + ], +) +def test_get_sku(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, + ) + response = client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetSkuRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Sku) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.is_active is True + assert response.type_ == resources.Sku.Type.RACK + assert response.vcpu_count == 1094 + + +def test_get_sku_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_sku() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSkuRequest() + + +def test_get_sku_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetSkuRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_sku(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSkuRequest( + name="name_value", + ) + + +def test_get_sku_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sku in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sku] = mock_rpc + request = {} + client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_sku(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sku_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, ) ) - response = await client.list_zones() + response = await client.get_sku() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest() + assert args[0] == service.GetSkuRequest() @pytest.mark.asyncio -async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_sku_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -13156,7 +13195,7 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_zones + client._client._transport.get_sku in client._client._transport._wrapped_methods ) @@ -13164,16 +13203,16 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_zones + client._client._transport.get_sku ] = mock_rpc request = {} - await client.list_zones(request) + await client.get_sku(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_zones(request) + await client.get_sku(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13181,8 +13220,8 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_list_zones_async( - transport: str = "grpc_asyncio", request_type=service.ListZonesRequest +async def test_get_sku_async( + transport: str = "grpc_asyncio", request_type=service.GetSkuRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13194,50 +13233,60 @@ async def test_list_zones_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.Sku( + name="name_value", + display_name="display_name_value", + description="description_value", + revision_id="revision_id_value", + is_active=True, + type_=resources.Sku.Type.RACK, + vcpu_count=1094, ) ) - response = await client.list_zones(request) + response = await client.get_sku(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListZonesRequest() + request = service.GetSkuRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListZonesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Sku) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.is_active is True + assert response.type_ == resources.Sku.Type.RACK + assert response.vcpu_count == 1094 @pytest.mark.asyncio -async def test_list_zones_async_from_dict(): - await test_list_zones_async(request_type=dict) +async def test_get_sku_async_from_dict(): + await test_get_sku_async(request_type=dict) -def test_list_zones_field_headers(): +def test_get_sku_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListZonesRequest() + request = service.GetSkuRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value = service.ListZonesResponse() - client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value = resources.Sku() + client.get_sku(request) + + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -13246,28 +13295,26 @@ def test_list_zones_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_zones_field_headers_async(): +async def test_get_sku_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListZonesRequest() + request = service.GetSkuRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse() - ) - await client.list_zones(request) + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) + await client.get_sku(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13278,35 +13325,35 @@ async def test_list_zones_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_zones_flattened(): +def test_get_sku_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse() + call.return_value = resources.Sku() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_zones( - parent="parent_value", + client.get_sku( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_zones_flattened_error(): +def test_get_sku_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13314,43 +13361,41 @@ def test_list_zones_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_zones( - service.ListZonesRequest(), - parent="parent_value", + client.get_sku( + service.GetSkuRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_zones_flattened_async(): +async def test_get_sku_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + with mock.patch.object(type(client.transport.get_sku), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse() + call.return_value = resources.Sku() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListZonesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Sku()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_zones( - parent="parent_value", + response = await client.get_sku( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_zones_flattened_error_async(): +async def test_get_sku_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13358,298 +13403,104 @@ async def test_list_zones_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_zones( - service.ListZonesRequest(), - parent="parent_value", + await client.get_sku( + service.GetSkuRequest(), + name="name_value", ) -def test_list_zones_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListZonesRequest, + dict, + ], +) +def test_list_zones(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_zones(request={}, retry=retry, timeout=timeout) + response = client.list_zones(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListZonesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Zone) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZonesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_zones_pages(transport_name: str = "grpc"): +def test_list_zones_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_zones), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_zones(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_zones() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest() -@pytest.mark.asyncio -async def test_list_zones_async_pager(): - client = GDCHardwareManagementAsyncClient( +def test_list_zones_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListZonesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - async_pager = await client.list_zones( - request={}, + client.list_zones(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Zone) for i in responses) - -@pytest.mark.asyncio -async def test_list_zones_async_pages(): - client = GDCHardwareManagementAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token="abc", - ), - service.ListZonesResponse( - zones=[], - next_page_token="def", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token="ghi", - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_zones(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetZoneRequest, - dict, - ], -) -def test_get_zone(request_type, transport: str = "grpc"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone( - name="name_value", - display_name="display_name_value", - state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - globally_unique_id="globally_unique_id_value", - ) - response = client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Zone) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED - assert response.ciq_uri == "ciq_uri_value" - assert response.globally_unique_id == "globally_unique_id_value" - - -def test_get_zone_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_zone() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest() - - -def test_get_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = service.GetZoneRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest( - name="name_value", - ) - - -def test_get_zone_use_cached_wrapped_rpc(): +def test_list_zones_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13663,21 +13514,21 @@ def test_get_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_zone in client._transport._wrapped_methods + assert client._transport.list_zones in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc request = {} - client.get_zone(request) + client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_zone(request) + client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13685,7 +13536,7 @@ def test_get_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_zone_empty_call_async(): +async def test_list_zones_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -13694,25 +13545,22 @@ async def test_get_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Zone( - name="name_value", - display_name="display_name_value", - state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - globally_unique_id="globally_unique_id_value", + service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_zone() + response = await client.list_zones() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest() + assert args[0] == service.ListZonesRequest() @pytest.mark.asyncio -async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -13727,7 +13575,7 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_zone + client._client._transport.list_zones in client._client._transport._wrapped_methods ) @@ -13735,16 +13583,16 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_zone + client._client._transport.list_zones ] = mock_rpc request = {} - await client.get_zone(request) + await client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_zone(request) + await client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13752,8 +13600,8 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_get_zone_async( - transport: str = "grpc_asyncio", request_type=service.GetZoneRequest +async def test_list_zones_async( + transport: str = "grpc_asyncio", request_type=service.ListZonesRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13765,54 +13613,48 @@ async def test_get_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Zone( - name="name_value", - display_name="display_name_value", - state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - globally_unique_id="globally_unique_id_value", + service.ListZonesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_zone(request) + response = await client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetZoneRequest() + request = service.ListZonesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Zone) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED - assert response.ciq_uri == "ciq_uri_value" - assert response.globally_unique_id == "globally_unique_id_value" + assert isinstance(response, pagers.ListZonesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_zone_async_from_dict(): - await test_get_zone_async(request_type=dict) +async def test_list_zones_async_from_dict(): + await test_list_zones_async(request_type=dict) -def test_get_zone_field_headers(): +def test_list_zones_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetZoneRequest() + request = service.ListZonesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value = resources.Zone() - client.get_zone(request) + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value = service.ListZonesResponse() + client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13823,26 +13665,28 @@ def test_get_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_zone_field_headers_async(): +async def test_list_zones_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetZoneRequest() + request = service.ListZonesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) - await client.get_zone(request) + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse() + ) + await client.list_zones(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13853,35 +13697,35 @@ async def test_get_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_zone_flattened(): +def test_list_zones_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Zone() + call.return_value = service.ListZonesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_zone( - name="name_value", + client.list_zones( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_zone_flattened_error(): +def test_list_zones_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13889,41 +13733,43 @@ def test_get_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_zone( - service.GetZoneRequest(), - name="name_value", + client.list_zones( + service.ListZonesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_zone_flattened_async(): +async def test_list_zones_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Zone() + call.return_value = service.ListZonesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListZonesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_zone( - name="name_value", + response = await client.list_zones( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_zone_flattened_error_async(): +async def test_list_zones_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13931,20 +13777,214 @@ async def test_get_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_zone( - service.GetZoneRequest(), - name="name_value", + await client.list_zones( + service.ListZonesRequest(), + parent="parent_value", + ) + + +def test_list_zones_pager(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_zones(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Zone) for i in results) + + +def test_list_zones_pages(transport_name: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = list(client.list_zones(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_zones_async_pager(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_zones( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Zone) for i in responses) + + +@pytest.mark.asyncio +async def test_list_zones_async_pages(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token="abc", + ), + service.ListZonesResponse( + zones=[], + next_page_token="def", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token="ghi", + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_zones(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - service.CreateZoneRequest, + service.GetZoneRequest, dict, ], ) -def test_create_zone(request_type, transport: str = "grpc"): +def test_get_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13955,22 +13995,33 @@ def test_create_zone(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_zone(request) + call.return_value = resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) + response = client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateZoneRequest() + request = service.GetZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resources.Zone) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.globally_unique_id == "globally_unique_id_value" -def test_create_zone_empty_call(): +def test_get_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -13979,17 +14030,17 @@ def test_create_zone_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_zone() + client.get_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateZoneRequest() + assert args[0] == service.GetZoneRequest() -def test_create_zone_non_empty_request_with_auto_populated_field(): +def test_get_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -14000,26 +14051,24 @@ def test_create_zone_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", + request = service.GetZoneRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_zone(request=request) + client.get_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", + assert args[0] == service.GetZoneRequest( + name="name_value", ) -def test_create_zone_use_cached_wrapped_rpc(): +def test_get_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14033,26 +14082,21 @@ def test_create_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_zone in client._transport._wrapped_methods + assert client._transport.get_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc request = {} - client.create_zone(request) + client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_zone(request) + client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14060,7 +14104,7 @@ def test_create_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_zone_empty_call_async(): +async def test_get_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -14069,21 +14113,25 @@ async def test_create_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) ) - response = await client.create_zone() + response = await client.get_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateZoneRequest() + assert args[0] == service.GetZoneRequest() @pytest.mark.asyncio -async def test_create_zone_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14098,7 +14146,7 @@ async def test_create_zone_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_zone + client._client._transport.get_zone in client._client._transport._wrapped_methods ) @@ -14106,21 +14154,16 @@ async def test_create_zone_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_zone + client._client._transport.get_zone ] = mock_rpc request = {} - await client.create_zone(request) + await client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_zone(request) + await client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14128,8 +14171,8 @@ async def test_create_zone_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_zone_async( - transport: str = "grpc_asyncio", request_type=service.CreateZoneRequest +async def test_get_zone_async( + transport: str = "grpc_asyncio", request_type=service.GetZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14141,43 +14184,54 @@ async def test_create_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Zone( + name="name_value", + display_name="display_name_value", + state=resources.Zone.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + globally_unique_id="globally_unique_id_value", + ) ) - response = await client.create_zone(request) + response = await client.get_zone(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateZoneRequest() + request = service.GetZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resources.Zone) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Zone.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.globally_unique_id == "globally_unique_id_value" @pytest.mark.asyncio -async def test_create_zone_async_from_dict(): - await test_create_zone_async(request_type=dict) +async def test_get_zone_async_from_dict(): + await test_get_zone_async(request_type=dict) -def test_create_zone_field_headers(): +def test_get_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateZoneRequest() + request = service.GetZoneRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_zone(request) + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value = resources.Zone() + client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14188,28 +14242,26 @@ def test_create_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_zone_field_headers_async(): +async def test_get_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateZoneRequest() + request = service.GetZoneRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_zone(request) + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) + await client.get_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14220,43 +14272,35 @@ async def test_create_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_zone_flattened(): +def test_get_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Zone() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_zone( - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + client.get_zone( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].zone - mock_val = resources.Zone(name="name_value") - assert arg == mock_val - arg = args[0].zone_id - mock_val = "zone_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_zone_flattened_error(): +def test_get_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14264,53 +14308,41 @@ def test_create_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_zone( - service.CreateZoneRequest(), - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + client.get_zone( + service.GetZoneRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_zone_flattened_async(): +async def test_get_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + with mock.patch.object(type(client.transport.get_zone), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Zone() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_zone( - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + response = await client.get_zone( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].zone - mock_val = resources.Zone(name="name_value") - assert arg == mock_val - arg = args[0].zone_id - mock_val = "zone_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_zone_flattened_error_async(): +async def test_get_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14318,22 +14350,20 @@ async def test_create_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_zone( - service.CreateZoneRequest(), - parent="parent_value", - zone=resources.Zone(name="name_value"), - zone_id="zone_id_value", + await client.get_zone( + service.GetZoneRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateZoneRequest, + service.CreateZoneRequest, dict, ], ) -def test_update_zone(request_type, transport: str = "grpc"): +def test_create_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14344,22 +14374,22 @@ def test_update_zone(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_zone(request) + response = client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_zone_empty_call(): +def test_create_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -14368,17 +14398,17 @@ def test_update_zone_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_zone() + client.create_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest() + assert args[0] == service.CreateZoneRequest() -def test_update_zone_non_empty_request_with_auto_populated_field(): +def test_create_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -14389,20 +14419,26 @@ def test_update_zone_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_zone(request=request) + client.create_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest() + assert args[0] == service.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + ) -def test_update_zone_use_cached_wrapped_rpc(): +def test_create_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14416,16 +14452,16 @@ def test_update_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_zone in client._transport._wrapped_methods + assert client._transport.create_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc request = {} - client.update_zone(request) + client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14435,7 +14471,7 @@ def test_update_zone_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_zone(request) + client.create_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14443,7 +14479,7 @@ def test_update_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_zone_empty_call_async(): +async def test_create_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -14452,19 +14488,19 @@ async def test_update_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_zone() + response = await client.create_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest() + assert args[0] == service.CreateZoneRequest() @pytest.mark.asyncio -async def test_update_zone_async_use_cached_wrapped_rpc( +async def test_create_zone_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14481,7 +14517,7 @@ async def test_update_zone_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_zone + client._client._transport.create_zone in client._client._transport._wrapped_methods ) @@ -14489,11 +14525,11 @@ async def test_update_zone_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_zone + client._client._transport.create_zone ] = mock_rpc request = {} - await client.update_zone(request) + await client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14503,7 +14539,7 @@ async def test_update_zone_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_zone(request) + await client.create_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14511,8 +14547,8 @@ async def test_update_zone_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_zone_async( - transport: str = "grpc_asyncio", request_type=service.UpdateZoneRequest +async def test_create_zone_async( + transport: str = "grpc_asyncio", request_type=service.CreateZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14524,17 +14560,17 @@ async def test_update_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_zone(request) + response = await client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -14542,25 +14578,25 @@ async def test_update_zone_async( @pytest.mark.asyncio -async def test_update_zone_async_from_dict(): - await test_update_zone_async(request_type=dict) +async def test_create_zone_async_from_dict(): + await test_create_zone_async(request_type=dict) -def test_update_zone_field_headers(): +def test_create_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() - request.zone.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_zone(request) + client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14571,28 +14607,28 @@ def test_update_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "zone.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_zone_field_headers_async(): +async def test_create_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateZoneRequest() + request = service.CreateZoneRequest() - request.zone.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_zone(request) + await client.create_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14603,39 +14639,43 @@ async def test_update_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "zone.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_zone_flattened(): +def test_create_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_zone( + client.create_zone( + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].zone mock_val = resources.Zone(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].zone_id + mock_val = "zone_id_value" assert arg == mock_val -def test_update_zone_flattened_error(): +def test_create_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14643,21 +14683,22 @@ def test_update_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_zone( - service.UpdateZoneRequest(), + client.create_zone( + service.CreateZoneRequest(), + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) @pytest.mark.asyncio -async def test_update_zone_flattened_async(): +async def test_create_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + with mock.patch.object(type(client.transport.create_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -14666,25 +14707,29 @@ async def test_update_zone_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_zone( + response = await client.create_zone( + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].zone mock_val = resources.Zone(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].zone_id + mock_val = "zone_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_zone_flattened_error_async(): +async def test_create_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14692,21 +14737,22 @@ async def test_update_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_zone( - service.UpdateZoneRequest(), + await client.create_zone( + service.CreateZoneRequest(), + parent="parent_value", zone=resources.Zone(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + zone_id="zone_id_value", ) @pytest.mark.parametrize( "request_type", [ - service.DeleteZoneRequest, + service.UpdateZoneRequest, dict, ], ) -def test_delete_zone(request_type, transport: str = "grpc"): +def test_update_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14717,22 +14763,22 @@ def test_delete_zone(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_zone(request) + response = client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_zone_empty_call(): +def test_update_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -14741,17 +14787,17 @@ def test_delete_zone_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_zone() + client.update_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest() + assert args[0] == service.UpdateZoneRequest() -def test_delete_zone_non_empty_request_with_auto_populated_field(): +def test_update_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -14762,24 +14808,20 @@ def test_delete_zone_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DeleteZoneRequest( - name="name_value", - ) + request = service.UpdateZoneRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_zone(request=request) + client.update_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest( - name="name_value", - ) + assert args[0] == service.UpdateZoneRequest() -def test_delete_zone_use_cached_wrapped_rpc(): +def test_update_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14793,16 +14835,16 @@ def test_delete_zone_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_zone in client._transport._wrapped_methods + assert client._transport.update_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc request = {} - client.delete_zone(request) + client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14812,7 +14854,7 @@ def test_delete_zone_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_zone(request) + client.update_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14820,7 +14862,7 @@ def test_delete_zone_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_zone_empty_call_async(): +async def test_update_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -14829,19 +14871,19 @@ async def test_delete_zone_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_zone() + response = await client.update_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest() + assert args[0] == service.UpdateZoneRequest() @pytest.mark.asyncio -async def test_delete_zone_async_use_cached_wrapped_rpc( +async def test_update_zone_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14858,7 +14900,7 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_zone + client._client._transport.update_zone in client._client._transport._wrapped_methods ) @@ -14866,11 +14908,11 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_zone + client._client._transport.update_zone ] = mock_rpc request = {} - await client.delete_zone(request) + await client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14880,7 +14922,7 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_zone(request) + await client.update_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14888,8 +14930,8 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_zone_async( - transport: str = "grpc_asyncio", request_type=service.DeleteZoneRequest +async def test_update_zone_async( + transport: str = "grpc_asyncio", request_type=service.UpdateZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14901,17 +14943,17 @@ async def test_delete_zone_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_zone(request) + response = await client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -14919,25 +14961,25 @@ async def test_delete_zone_async( @pytest.mark.asyncio -async def test_delete_zone_async_from_dict(): - await test_delete_zone_async(request_type=dict) +async def test_update_zone_async_from_dict(): + await test_update_zone_async(request_type=dict) -def test_delete_zone_field_headers(): +def test_update_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() - request.name = "name_value" + request.zone.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_zone(request) + client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14948,28 +14990,28 @@ def test_delete_zone_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "zone.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_zone_field_headers_async(): +async def test_update_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteZoneRequest() + request = service.UpdateZoneRequest() - request.name = "name_value" + request.zone.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_zone(request) + await client.update_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14980,35 +15022,39 @@ async def test_delete_zone_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "zone.name=name_value", ) in kw["metadata"] -def test_delete_zone_flattened(): +def test_update_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_zone( - name="name_value", + client.update_zone( + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_zone_flattened_error(): +def test_update_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15016,20 +15062,21 @@ def test_delete_zone_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_zone( - service.DeleteZoneRequest(), - name="name_value", + client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_zone_flattened_async(): +async def test_update_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + with mock.patch.object(type(client.transport.update_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -15038,21 +15085,25 @@ async def test_delete_zone_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_zone( - name="name_value", + response = await client.update_zone( + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].zone + mock_val = resources.Zone(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_zone_flattened_error_async(): +async def test_update_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15060,20 +15111,21 @@ async def test_delete_zone_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_zone( - service.DeleteZoneRequest(), - name="name_value", + await client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.SignalZoneStateRequest, + service.DeleteZoneRequest, dict, ], ) -def test_signal_zone_state(request_type, transport: str = "grpc"): +def test_delete_zone(request_type, transport: str = "grpc"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15084,24 +15136,22 @@ def test_signal_zone_state(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.signal_zone_state(request) + response = client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) -def test_signal_zone_state_empty_call(): +def test_delete_zone_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementClient( @@ -15110,19 +15160,17 @@ def test_signal_zone_state_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.signal_zone_state() + client.delete_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SignalZoneStateRequest() + assert args[0] == service.DeleteZoneRequest() -def test_signal_zone_state_non_empty_request_with_auto_populated_field(): +def test_delete_zone_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = GDCHardwareManagementClient( @@ -15133,26 +15181,24 @@ def test_signal_zone_state_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.SignalZoneStateRequest( + request = service.DeleteZoneRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.signal_zone_state(request=request) + client.delete_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SignalZoneStateRequest( + assert args[0] == service.DeleteZoneRequest( name="name_value", ) -def test_signal_zone_state_use_cached_wrapped_rpc(): +def test_delete_zone_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15166,18 +15212,16 @@ def test_signal_zone_state_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.signal_zone_state in client._transport._wrapped_methods + assert client._transport.delete_zone in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.signal_zone_state - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc request = {} - client.signal_zone_state(request) + client.delete_zone(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -15187,7 +15231,7 @@ def test_signal_zone_state_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.signal_zone_state(request) + client.delete_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15195,7 +15239,7 @@ def test_signal_zone_state_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_signal_zone_state_empty_call_async(): +async def test_delete_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = GDCHardwareManagementAsyncClient( @@ -15204,21 +15248,19 @@ async def test_signal_zone_state_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.signal_zone_state() + response = await client.delete_zone() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SignalZoneStateRequest() + assert args[0] == service.DeleteZoneRequest() @pytest.mark.asyncio -async def test_signal_zone_state_async_use_cached_wrapped_rpc( +async def test_delete_zone_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15235,7 +15277,7 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.signal_zone_state + client._client._transport.delete_zone in client._client._transport._wrapped_methods ) @@ -15243,11 +15285,11 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.signal_zone_state + client._client._transport.delete_zone ] = mock_rpc request = {} - await client.signal_zone_state(request) + await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -15257,7 +15299,7 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.signal_zone_state(request) + await client.delete_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15265,8 +15307,8 @@ async def test_signal_zone_state_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_signal_zone_state_async( - transport: str = "grpc_asyncio", request_type=service.SignalZoneStateRequest +async def test_delete_zone_async( + transport: str = "grpc_asyncio", request_type=service.DeleteZoneRequest ): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15278,19 +15320,17 @@ async def test_signal_zone_state_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.signal_zone_state(request) + response = await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -15298,27 +15338,25 @@ async def test_signal_zone_state_async( @pytest.mark.asyncio -async def test_signal_zone_state_async_from_dict(): - await test_signal_zone_state_async(request_type=dict) +async def test_delete_zone_async_from_dict(): + await test_delete_zone_async(request_type=dict) -def test_signal_zone_state_field_headers(): +def test_delete_zone_field_headers(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.signal_zone_state(request) + client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15334,25 +15372,23 @@ def test_signal_zone_state_field_headers(): @pytest.mark.asyncio -async def test_signal_zone_state_field_headers_async(): +async def test_delete_zone_field_headers_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.SignalZoneStateRequest() + request = service.DeleteZoneRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.signal_zone_state(request) + await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15367,22 +15403,19 @@ async def test_signal_zone_state_field_headers_async(): ) in kw["metadata"] -def test_signal_zone_state_flattened(): +def test_delete_zone_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.signal_zone_state( + client.delete_zone( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) # Establish that the underlying call was made with the expected @@ -15392,12 +15425,9 @@ def test_signal_zone_state_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].state_signal - mock_val = service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP - assert arg == mock_val -def test_signal_zone_state_flattened_error(): +def test_delete_zone_flattened_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15405,23 +15435,20 @@ def test_signal_zone_state_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.signal_zone_state( - service.SignalZoneStateRequest(), + client.delete_zone( + service.DeleteZoneRequest(), name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) @pytest.mark.asyncio -async def test_signal_zone_state_flattened_async(): +async def test_delete_zone_flattened_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.signal_zone_state), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -15430,9 +15457,8 @@ async def test_signal_zone_state_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.signal_zone_state( + response = await client.delete_zone( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) # Establish that the underlying call was made with the expected @@ -15442,13 +15468,10 @@ async def test_signal_zone_state_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].state_signal - mock_val = service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP - assert arg == mock_val @pytest.mark.asyncio -async def test_signal_zone_state_flattened_error_async(): +async def test_delete_zone_flattened_error_async(): client = GDCHardwareManagementAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15456,56 +15479,845 @@ async def test_signal_zone_state_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.signal_zone_state( - service.SignalZoneStateRequest(), + await client.delete_zone( + service.DeleteZoneRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.SignalZoneStateRequest, + dict, + ], +) +def test_signal_zone_state(request_type, transport: str = "grpc"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.SignalZoneStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_signal_zone_state_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.signal_zone_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest() + + +def test_signal_zone_state_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.SignalZoneStateRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.signal_zone_state(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, ) +def test_signal_zone_state_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.signal_zone_state in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.signal_zone_state + ] = mock_rpc + request = {} + client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.signal_zone_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_signal_zone_state_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.signal_zone_state() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SignalZoneStateRequest() + + +@pytest.mark.asyncio +async def test_signal_zone_state_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.signal_zone_state + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.signal_zone_state + ] = mock_rpc + + request = {} + await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.signal_zone_state(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_signal_zone_state_async( + transport: str = "grpc_asyncio", request_type=service.SignalZoneStateRequest +): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.SignalZoneStateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_signal_zone_state_async_from_dict(): + await test_signal_zone_state_async(request_type=dict) + + +def test_signal_zone_state_field_headers(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SignalZoneStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_signal_zone_state_field_headers_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SignalZoneStateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.signal_zone_state(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_signal_zone_state_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.signal_zone_state( + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].state_signal + mock_val = ( + service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED + ) + assert arg == mock_val + + +def test_signal_zone_state_flattened_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.signal_zone_state( + service.SignalZoneStateRequest(), + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + +@pytest.mark.asyncio +async def test_signal_zone_state_flattened_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.signal_zone_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.signal_zone_state( + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].state_signal + mock_val = ( + service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_signal_zone_state_flattened_error_async(): + client = GDCHardwareManagementAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.signal_zone_state( + service.SignalZoneStateRequest(), + name="name_value", + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListOrdersRequest, + dict, + ], +) +def test_list_orders_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_orders(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrdersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_orders_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_orders in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + + request = {} + client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_orders(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest): + transport_class = transports.GDCHardwareManagementRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_orders(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_orders_rest_unset_required_fields(): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_orders._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_orders_rest_interceptors(null_interceptor): + transport = transports.GDCHardwareManagementRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GDCHardwareManagementRestInterceptor(), + ) + client = GDCHardwareManagementClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_orders" + ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "pre_list_orders" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListOrdersRequest.pb(service.ListOrdersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListOrdersResponse.to_json( + service.ListOrdersResponse() + ) + + request = service.ListOrdersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListOrdersResponse() + + client.list_orders( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_orders_rest_bad_request( + transport: str = "rest", request_type=service.ListOrdersRequest +): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_orders(request) + + +def test_list_orders_rest_flattened(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListOrdersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_orders(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/orders" + % client.transport._host, + args[1], + ) + + +def test_list_orders_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_orders( + service.ListOrdersRequest(), + parent="parent_value", + ) + + +def test_list_orders_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + resources.Order(), + ], + next_page_token="abc", + ), + service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + ], + next_page_token="ghi", + ), + service.ListOrdersResponse( + orders=[ + resources.Order(), + resources.Order(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListOrdersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_orders(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Order) for i in results) + + pages = list(client.list_orders(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ - service.ListOrdersRequest, + service.GetOrderRequest, dict, ], ) -def test_list_orders_rest(request_type): +def test_get_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListOrdersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Order( + name="name_value", + display_name="display_name_value", + state=resources.Order.State.DRAFT, + target_workloads=["target_workloads_value"], + customer_motivation="customer_motivation_value", + region_code="region_code_value", + order_form_uri="order_form_uri_value", + type_=resources.Order.Type.PAID, + billing_id="billing_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListOrdersResponse.pb(return_value) + return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.get_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListOrdersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == resources.Order.State.DRAFT + assert response.target_workloads == ["target_workloads_value"] + assert response.customer_motivation == "customer_motivation_value" + assert response.region_code == "region_code_value" + assert response.order_form_uri == "order_form_uri_value" + assert response.type_ == resources.Order.Type.PAID + assert response.billing_id == "billing_id_value" -def test_list_orders_rest_use_cached_wrapped_rpc(): +def test_get_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15519,33 +16331,33 @@ def test_list_orders_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_orders in client._transport._wrapped_methods + assert client._transport.get_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + client._transport._wrapped_methods[client._transport.get_order] = mock_rpc request = {} - client.list_orders(request) + client.get_order(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_orders(request) + client.get_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest): +def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15556,30 +16368,21 @@ def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) + ).get_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15588,7 +16391,7 @@ def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListOrdersResponse() + return_value = resources.Order() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15609,40 +16412,30 @@ def test_list_orders_rest_required_fields(request_type=service.ListOrdersRequest response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListOrdersResponse.pb(return_value) + return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.get_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_orders_rest_unset_required_fields(): +def test_get_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_orders._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_orders_rest_interceptors(null_interceptor): +def test_get_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15655,13 +16448,13 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_orders" + transports.GDCHardwareManagementRestInterceptor, "post_get_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_orders" + transports.GDCHardwareManagementRestInterceptor, "pre_get_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListOrdersRequest.pb(service.ListOrdersRequest()) + pb_message = service.GetOrderRequest.pb(service.GetOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15672,19 +16465,17 @@ def test_list_orders_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListOrdersResponse.to_json( - service.ListOrdersResponse() - ) + 
req.return_value._content = resources.Order.to_json(resources.Order()) - request = service.ListOrdersRequest() + request = service.GetOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListOrdersResponse() + post.return_value = resources.Order() - client.list_orders( + client.get_order( request, metadata=[ ("key", "val"), @@ -15696,8 +16487,8 @@ def test_list_orders_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_orders_rest_bad_request( - transport: str = "rest", request_type=service.ListOrdersRequest +def test_get_order_rest_bad_request( + transport: str = "rest", request_type=service.GetOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15705,7 +16496,7 @@ def test_list_orders_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15717,10 +16508,10 @@ def test_list_orders_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_orders(request) + client.get_order(request) -def test_list_orders_rest_flattened(): +def test_get_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15729,14 +16520,14 @@ def test_list_orders_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListOrdersResponse() + return_value = resources.Order() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -15744,25 +16535,25 @@ def test_list_orders_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListOrdersResponse.pb(return_value) + return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_orders(**mock_args) + client.get_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/orders" + "%s/v1alpha/{name=projects/*/locations/*/orders/*}" % client.transport._host, args[1], ) -def test_list_orders_rest_flattened_error(transport: str = "rest"): +def test_get_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15771,130 +16562,182 @@ def test_list_orders_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_orders( - service.ListOrdersRequest(), - parent="parent_value", + client.get_order( + service.GetOrderRequest(), + name="name_value", ) -def test_list_orders_rest_pager(transport: str = "rest"): +def test_get_order_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateOrderRequest, + dict, + ], +) +def test_create_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListOrdersResponse( - orders=[ - resources.Order(), - resources.Order(), - resources.Order(), - ], - next_page_token="abc", - ), - service.ListOrdersResponse( - orders=[], - next_page_token="def", - ), - service.ListOrdersResponse( - orders=[ - resources.Order(), - ], - next_page_token="ghi", - ), - service.ListOrdersResponse( - orders=[ - resources.Order(), - resources.Order(), - ], - ), - ) - # Two responses for two calls - response = response + response + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["order"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "target_workloads": ["target_workloads_value1", "target_workloads_value2"], + "customer_motivation": "customer_motivation_value", + "fulfillment_time": {}, + 
"region_code": "region_code_value", + "order_form_uri": "order_form_uri_value", + "type_": 1, + "submit_time": {}, + "billing_id": "billing_id_value", + "existing_hardware": [ + { + "site": "site_value", + "rack_location": "rack_location_value", + "rack_space": [{"start_rack_unit": 1613, "end_rack_unit": 1366}], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the values into proper Response objs - response = tuple(service.ListOrdersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateOrderRequest.meta.fields["order"] - sample_request = {"parent": "projects/sample1/locations/sample2"} + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - pager = client.list_orders(request=sample_request) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Order) for i in results) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - pages = list(client.list_orders(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - service.GetOrderRequest, - dict, - ], -) -def test_get_order_rest(request_type): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["order"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add 
`# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["order"][field])): + del request_init["order"][field][i][subfield] + else: + del request_init["order"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Order( - name="name_value", - display_name="display_name_value", - state=resources.Order.State.DRAFT, - target_workloads=["target_workloads_value"], - customer_motivation="customer_motivation_value", - region_code="region_code_value", - order_form_uri="order_form_uri_value", - type_=resources.Order.Type.PAID, - billing_id="billing_id_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.create_order(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Order) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.state == resources.Order.State.DRAFT - assert response.target_workloads == ["target_workloads_value"] - assert response.customer_motivation == "customer_motivation_value" - assert response.region_code == "region_code_value" - assert response.order_form_uri == "order_form_uri_value" - assert response.type_ == resources.Order.Type.PAID - assert response.billing_id == "billing_id_value" + assert response.operation.name == "operations/spam" -def test_get_order_rest_use_cached_wrapped_rpc(): +def test_create_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15908,33 +16751,37 @@ def test_get_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_order in client._transport._wrapped_methods + assert client._transport.create_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + client._transport._wrapped_methods[client._transport.create_order] = mock_rpc request = {} - client.get_order(request) + client.create_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_order(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): +def test_create_order_rest_required_fields(request_type=service.CreateOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15945,21 +16792,28 @@ def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).create_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).create_order._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15968,7 +16822,7 @@ def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Order() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15980,39 +16834,50 @@ def test_get_order_rest_required_fields(request_type=service.GetOrderRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.create_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_order_rest_unset_required_fields(): +def test_create_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_order._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderId", + "requestId", + ) + ) + & set( + ( + "parent", + "order", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_order_rest_interceptors(null_interceptor): +def test_create_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16025,13 +16890,15 @@ def test_get_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_order" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_order" + transports.GDCHardwareManagementRestInterceptor, "pre_create_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetOrderRequest.pb(service.GetOrderRequest()) + pb_message = service.CreateOrderRequest.pb(service.CreateOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16042,17 +16909,19 @@ def test_get_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Order.to_json(resources.Order()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetOrderRequest() + request = service.CreateOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Order() + post.return_value = operations_pb2.Operation() - client.get_order( + client.create_order( request, metadata=[ ("key", "val"), @@ -16064,8 +16933,8 @@ def test_get_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_order_rest_bad_request( - transport: str = "rest", request_type=service.GetOrderRequest +def 
test_create_order_rest_bad_request( + transport: str = "rest", request_type=service.CreateOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16073,7 +16942,7 @@ def test_get_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16085,10 +16954,10 @@ def test_get_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_order(request) + client.create_order(request) -def test_get_order_rest_flattened(): +def test_create_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16097,40 +16966,40 @@ def test_get_order_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Order() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_order(**mock_args) + client.create_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/orders" % client.transport._host, args[1], ) -def test_get_order_rest_flattened_error(transport: str = "rest"): +def test_create_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16139,13 +17008,15 @@ def test_get_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_order( - service.GetOrderRequest(), - name="name_value", + client.create_order( + service.CreateOrderRequest(), + parent="parent_value", + order=resources.Order(name="name_value"), + order_id="order_id_value", ) -def test_get_order_rest_error(): +def test_create_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16154,20 +17025,22 @@ def test_get_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateOrderRequest, + service.UpdateOrderRequest, dict, ], ) -def test_create_order_rest(request_type): +def test_update_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } request_init["order"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/orders/sample3", "display_name": "display_name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -16232,7 +17105,7 @@ def test_create_order_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateOrderRequest.meta.fields["order"] + test_field = service.UpdateOrderRequest.meta.fields["order"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -16308,13 +17181,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_order(request) + response = client.update_order(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_order_rest_use_cached_wrapped_rpc(): +def test_update_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16328,17 +17201,17 @@ def test_create_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_order in client._transport._wrapped_methods + assert client._transport.update_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_order] = mock_rpc + client._transport._wrapped_methods[client._transport.update_order] = mock_rpc request = {} - client.create_order(request) + client.update_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -16347,18 +17220,17 @@ def test_create_order_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_order(request) + client.update_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_order_rest_required_fields(request_type=service.CreateOrderRequest): +def test_update_order_rest_required_fields(request_type=service.UpdateOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16369,28 +17241,24 @@ def test_create_order_rest_required_fields(request_type=service.CreateOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_order._get_unset_required_fields(jsonified_request) + ).update_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_order._get_unset_required_fields(jsonified_request) + ).update_order._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "order_id", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16411,7 +17279,7 @@ def test_create_order_rest_required_fields(request_type=service.CreateOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -16424,29 +17292,29 @@ def test_create_order_rest_required_fields(request_type=service.CreateOrderReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_order(request) + response = client.update_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_order_rest_unset_required_fields(): +def test_update_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_order._get_unset_required_fields({}) + unset_fields = transport.update_order._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "orderId", "requestId", + "updateMask", ) ) & set( ( - "parent", + "updateMask", "order", ) ) @@ -16454,7 +17322,7 @@ def test_create_order_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_order_rest_interceptors(null_interceptor): +def test_update_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16469,13 +17337,13 @@ def 
test_create_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_order" + transports.GDCHardwareManagementRestInterceptor, "post_update_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_order" + transports.GDCHardwareManagementRestInterceptor, "pre_update_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateOrderRequest.pb(service.CreateOrderRequest()) + pb_message = service.UpdateOrderRequest.pb(service.UpdateOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16490,7 +17358,7 @@ def test_create_order_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateOrderRequest() + request = service.UpdateOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16498,7 +17366,7 @@ def test_create_order_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_order( + client.update_order( request, metadata=[ ("key", "val"), @@ -16510,8 +17378,8 @@ def test_create_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_order_rest_bad_request( - transport: str = "rest", request_type=service.CreateOrderRequest +def test_update_order_rest_bad_request( + transport: str = "rest", request_type=service.UpdateOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16519,7 +17387,9 @@ def test_create_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16531,10 +17401,10 @@ def test_create_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_order(request) + client.update_order(request) -def test_create_order_rest_flattened(): +def test_update_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16546,13 +17416,14 @@ def test_create_order_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", order=resources.Order(name="name_value"), - order_id="order_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -16563,20 +17434,20 @@ def test_create_order_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_order(**mock_args) + client.update_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/orders" + "%s/v1alpha/{order.name=projects/*/locations/*/orders/*}" % client.transport._host, args[1], ) -def test_create_order_rest_flattened_error(transport: str = "rest"): +def test_update_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16585,15 +17456,14 @@ def test_create_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_order( - service.CreateOrderRequest(), - parent="parent_value", + client.update_order( + service.UpdateOrderRequest(), order=resources.Order(name="name_value"), - order_id="order_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_order_rest_error(): +def test_update_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16602,148 +17472,18 @@ def test_create_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateOrderRequest, + service.DeleteOrderRequest, dict, ], ) -def test_update_order_rest(request_type): +def test_delete_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} - } - request_init["order"] = { - "name": "projects/sample1/locations/sample2/orders/sample3", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "organization_contact": { - "address": { - "revision": 879, - "region_code": "region_code_value", - "language_code": "language_code_value", - "postal_code": "postal_code_value", - "sorting_code": "sorting_code_value", - "administrative_area": "administrative_area_value", - "locality": "locality_value", - "sublocality": "sublocality_value", - "address_lines": ["address_lines_value1", "address_lines_value2"], - "recipients": ["recipients_value1", "recipients_value2"], - "organization": "organization_value", - }, - "email": "email_value", - "phone": "phone_value", - "contacts": [ - { - "given_name": "given_name_value", - "family_name": "family_name_value", - "email": "email_value", - "phone": "phone_value", - "time_zone": {"id": "id_value", "version": "version_value"}, - "reachable_times": [ - { - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "end_time": {}, - "days": [1], - } - ], - } - ], - }, - "target_workloads": ["target_workloads_value1", "target_workloads_value2"], - "customer_motivation": "customer_motivation_value", - "fulfillment_time": {}, - "region_code": "region_code_value", - "order_form_uri": "order_form_uri_value", - "type_": 1, - "submit_time": {}, - "billing_id": "billing_id_value", - "existing_hardware": [ - { - "site": "site_value", - "rack_location": "rack_location_value", - "rack_space": [{"start_rack_unit": 1613, "end_rack_unit": 1366}], - } - ], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateOrderRequest.meta.fields["order"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["order"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["order"][field])): - del request_init["order"][field][i][subfield] - else: - del request_init["order"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -16758,13 +17498,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_order(request) + response = client.delete_order(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_order_rest_use_cached_wrapped_rpc(): +def test_delete_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16778,17 +17518,17 @@ def test_update_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_order in client._transport._wrapped_methods + assert client._transport.delete_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_order] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_order] = mock_rpc request = {} - client.update_order(request) + client.delete_order(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -16797,17 +17537,18 @@ def test_update_order_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_order(request) + client.delete_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_order_rest_required_fields(request_type=service.UpdateOrderRequest): +def test_delete_order_rest_required_fields(request_type=service.DeleteOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16818,24 +17559,28 @@ def test_update_order_rest_required_fields(request_type=service.UpdateOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_order._get_unset_required_fields(jsonified_request) + ).delete_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_order._get_unset_required_fields(jsonified_request) + ).delete_order._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( + "force", "request_id", - "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16856,10 +17601,9 @@ def test_update_order_rest_required_fields(request_type=service.UpdateOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -16869,37 +17613,32 @@ def test_update_order_rest_required_fields(request_type=service.UpdateOrderReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_order(request) + response = client.delete_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_order_rest_unset_required_fields(): +def test_delete_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_order._get_unset_required_fields({}) + unset_fields = transport.delete_order._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "order", + "force", + "requestId", ) ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_order_rest_interceptors(null_interceptor): +def test_delete_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16914,13 +17653,13 @@ def test_update_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_order" + transports.GDCHardwareManagementRestInterceptor, "post_delete_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_order" + transports.GDCHardwareManagementRestInterceptor, "pre_delete_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateOrderRequest.pb(service.UpdateOrderRequest()) + pb_message = service.DeleteOrderRequest.pb(service.DeleteOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16935,7 +17674,7 @@ def test_update_order_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateOrderRequest() + request = service.DeleteOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16943,7 +17682,7 @@ def test_update_order_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_order( + client.delete_order( request, metadata=[ ("key", "val"), @@ -16955,8 +17694,8 @@ def test_update_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_order_rest_bad_request( - transport: str = "rest", request_type=service.UpdateOrderRequest +def test_delete_order_rest_bad_request( + transport: str = "rest", 
request_type=service.DeleteOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16964,9 +17703,7 @@ def test_update_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16978,10 +17715,10 @@ def test_update_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_order(request) + client.delete_order(request) -def test_update_order_rest_flattened(): +def test_delete_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16993,14 +17730,11 @@ def test_update_order_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "order": {"name": "projects/sample1/locations/sample2/orders/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - order=resources.Order(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -17011,20 +17745,20 @@ def test_update_order_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_order(**mock_args) + client.delete_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{order.name=projects/*/locations/*/orders/*}" + "%s/v1alpha/{name=projects/*/locations/*/orders/*}" % client.transport._host, args[1], ) -def test_update_order_rest_flattened_error(transport: str = "rest"): +def test_delete_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17033,14 +17767,13 @@ def test_update_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_order( - service.UpdateOrderRequest(), - order=resources.Order(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_order( + service.DeleteOrderRequest(), + name="name_value", ) -def test_update_order_rest_error(): +def test_delete_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17049,11 +17782,11 @@ def test_update_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteOrderRequest, + service.SubmitOrderRequest, dict, ], ) -def test_delete_order_rest(request_type): +def test_submit_order_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17075,13 +17808,13 @@ def test_delete_order_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_order(request) + response = client.submit_order(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_order_rest_use_cached_wrapped_rpc(): +def test_submit_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17095,17 +17828,17 @@ def test_delete_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_order in client._transport._wrapped_methods + assert client._transport.submit_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_order] = mock_rpc + client._transport._wrapped_methods[client._transport.submit_order] = mock_rpc request = {} - client.delete_order(request) + client.submit_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -17114,14 +17847,14 @@ def test_delete_order_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_order(request) + client.submit_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_order_rest_required_fields(request_type=service.DeleteOrderRequest): +def test_submit_order_rest_required_fields(request_type=service.SubmitOrderRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} @@ -17136,7 +17869,7 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_order._get_unset_required_fields(jsonified_request) + ).submit_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17145,14 +17878,7 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_order._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "force", - "request_id", - ) - ) + ).submit_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17178,9 +17904,10 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -17190,32 +17917,24 @@ def test_delete_order_rest_required_fields(request_type=service.DeleteOrderReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_order(request) + response = client.submit_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_order_rest_unset_required_fields(): +def test_submit_order_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_order._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "force", - "requestId", - ) - ) - & set(("name",)) - ) + unset_fields = transport.submit_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_order_rest_interceptors(null_interceptor): +def test_submit_order_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17230,13 +17949,13 @@ def test_delete_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_delete_order" + transports.GDCHardwareManagementRestInterceptor, 
"post_submit_order" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_delete_order" + transports.GDCHardwareManagementRestInterceptor, "pre_submit_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteOrderRequest.pb(service.DeleteOrderRequest()) + pb_message = service.SubmitOrderRequest.pb(service.SubmitOrderRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17251,7 +17970,7 @@ def test_delete_order_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.DeleteOrderRequest() + request = service.SubmitOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -17259,7 +17978,7 @@ def test_delete_order_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_order( + client.submit_order( request, metadata=[ ("key", "val"), @@ -17271,8 +17990,8 @@ def test_delete_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_order_rest_bad_request( - transport: str = "rest", request_type=service.DeleteOrderRequest +def test_submit_order_rest_bad_request( + transport: str = "rest", request_type=service.SubmitOrderRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17292,10 +18011,10 @@ def test_delete_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_order(request) + client.submit_order(request) -def test_delete_order_rest_flattened(): +def test_submit_order_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17322,20 +18041,20 @@ def test_delete_order_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_order(**mock_args) + client.submit_order(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*}" + "%s/v1alpha/{name=projects/*/locations/*/orders/*}:submit" % client.transport._host, args[1], ) -def test_delete_order_rest_flattened_error(transport: str = "rest"): +def test_submit_order_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17344,13 +18063,13 @@ def test_delete_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_order( - service.DeleteOrderRequest(), + client.submit_order( + service.SubmitOrderRequest(), name="name_value", ) -def test_delete_order_rest_error(): +def test_submit_order_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17359,39 +18078,46 @@ def test_delete_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.SubmitOrderRequest, + service.ListSitesRequest, dict, ], ) -def test_submit_order_rest(request_type): +def test_list_sites_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSitesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.submit_order(request) + response = client.list_sites(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_submit_order_rest_use_cached_wrapped_rpc(): +def test_list_sites_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17405,37 +18131,33 @@ def test_submit_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.submit_order in client._transport._wrapped_methods + assert client._transport.list_sites in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.submit_order] = mock_rpc + client._transport._wrapped_methods[client._transport.list_sites] = mock_rpc request = {} - client.submit_order(request) + client.list_sites(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.submit_order(request) + client.list_sites(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_submit_order_rest_required_fields(request_type=service.SubmitOrderRequest): +def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17446,21 +18168,30 @@ def test_submit_order_rest_required_fields(request_type=service.SubmitOrderReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).submit_order._get_unset_required_fields(jsonified_request) + ).list_sites._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).submit_order._get_unset_required_fields(jsonified_request) + ).list_sites._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17469,7 +18200,7 @@ def test_submit_order_rest_required_fields(request_type=service.SubmitOrderReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSitesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17481,37 +18212,49 @@ def test_submit_order_rest_required_fields(request_type=service.SubmitOrderReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.submit_order(request) + response = client.list_sites(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_submit_order_rest_unset_required_fields(): +def test_list_sites_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.submit_order._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_submit_order_rest_interceptors(null_interceptor): +def test_list_sites_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17524,15 +18267,13 @@ def test_submit_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_submit_order" + transports.GDCHardwareManagementRestInterceptor, "post_list_sites" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_submit_order" + transports.GDCHardwareManagementRestInterceptor, "pre_list_sites" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.SubmitOrderRequest.pb(service.SubmitOrderRequest()) + pb_message = service.ListSitesRequest.pb(service.ListSitesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17543,19 +18284,19 @@ def test_submit_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListSitesResponse.to_json( + service.ListSitesResponse() ) - request = service.SubmitOrderRequest() + request = service.ListSitesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListSitesResponse() - client.submit_order( + client.list_sites( request, metadata=[ ("key", "val"), @@ -17567,8 +18308,8 @@ def test_submit_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_submit_order_rest_bad_request( - 
transport: str = "rest", request_type=service.SubmitOrderRequest +def test_list_sites_rest_bad_request( + transport: str = "rest", request_type=service.ListSitesRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17576,7 +18317,7 @@ def test_submit_order_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/orders/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17588,10 +18329,10 @@ def test_submit_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.submit_order(request) + client.list_sites(request) -def test_submit_order_rest_flattened(): +def test_list_sites_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17600,38 +18341,39 @@ def test_submit_order_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSitesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSitesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.submit_order(**mock_args) + client.list_sites(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*}:submit" - % client.transport._host, + "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, args[1], ) -def test_submit_order_rest_flattened_error(transport: str = "rest"): +def test_list_sites_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17640,61 +18382,124 @@ def test_submit_order_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.submit_order( - service.SubmitOrderRequest(), - name="name_value", + client.list_sites( + service.ListSitesRequest(), + parent="parent_value", ) -def test_submit_order_rest_error(): +def test_list_sites_rest_pager(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + resources.Site(), + ], + next_page_token="abc", + ), + service.ListSitesResponse( + sites=[], + next_page_token="def", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + ], + next_page_token="ghi", + ), + service.ListSitesResponse( + sites=[ + resources.Site(), + resources.Site(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListSitesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_sites(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Site) for i in results) + + pages = list(client.list_sites(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListSitesRequest, + service.GetSiteRequest, dict, ], ) -def test_list_sites_rest(request_type): +def test_get_site_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListSitesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Site( + name="name_value", + display_name="display_name_value", + description="description_value", + google_maps_pin_uri="google_maps_pin_uri_value", + notes="notes_value", + customer_site_id="customer_site_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListSitesResponse.pb(return_value) + return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_sites(request) + response = client.get_site(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSitesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Site) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.google_maps_pin_uri == "google_maps_pin_uri_value" + assert response.notes == "notes_value" + assert response.customer_site_id == "customer_site_id_value" -def test_list_sites_rest_use_cached_wrapped_rpc(): +def test_get_site_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17708,33 +18513,33 @@ def test_list_sites_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_sites in client._transport._wrapped_methods + assert client._transport.get_site in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_sites] = mock_rpc + client._transport._wrapped_methods[client._transport.get_site] = mock_rpc request = {} - client.list_sites(request) + client.get_site(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_sites(request) + client.get_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): +def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17745,30 +18550,21 @@ def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_sites._get_unset_required_fields(jsonified_request) + ).get_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_sites._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17777,7 +18573,7 @@ def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListSitesResponse() + return_value = resources.Site() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17798,40 +18594,30 @@ def test_list_sites_rest_required_fields(request_type=service.ListSitesRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListSitesResponse.pb(return_value) + return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_sites(request) + response = client.get_site(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_sites_rest_unset_required_fields(): +def test_get_site_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_sites._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_sites_rest_interceptors(null_interceptor): +def test_get_site_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17844,13 +18630,13 @@ def test_list_sites_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_sites" + transports.GDCHardwareManagementRestInterceptor, "post_get_site" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_sites" + transports.GDCHardwareManagementRestInterceptor, "pre_get_site" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListSitesRequest.pb(service.ListSitesRequest()) + pb_message = service.GetSiteRequest.pb(service.GetSiteRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17861,19 +18647,17 @@ def test_list_sites_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListSitesResponse.to_json( - service.ListSitesResponse() - ) + req.return_value._content 
= resources.Site.to_json(resources.Site()) - request = service.ListSitesRequest() + request = service.GetSiteRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListSitesResponse() + post.return_value = resources.Site() - client.list_sites( + client.get_site( request, metadata=[ ("key", "val"), @@ -17885,8 +18669,8 @@ def test_list_sites_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_sites_rest_bad_request( - transport: str = "rest", request_type=service.ListSitesRequest +def test_get_site_rest_bad_request( + transport: str = "rest", request_type=service.GetSiteRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17894,7 +18678,7 @@ def test_list_sites_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17906,10 +18690,10 @@ def test_list_sites_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_sites(request) + client.get_site(request) -def test_list_sites_rest_flattened(): +def test_get_site_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17918,163 +18702,212 @@ def test_list_sites_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListSitesResponse() + return_value = resources.Site() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/sites/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListSitesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_sites(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, - args[1], - ) - - -def test_list_sites_rest_flattened_error(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_sites( - service.ListSitesRequest(), - parent="parent_value", + name="name_value", ) - - -def test_list_sites_rest_pager(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListSitesResponse( - sites=[ - resources.Site(), - resources.Site(), - resources.Site(), - ], - next_page_token="abc", - ), - service.ListSitesResponse( - sites=[], - next_page_token="def", - ), - service.ListSitesResponse( - sites=[ - resources.Site(), - ], - next_page_token="ghi", - ), - service.ListSitesResponse( - sites=[ - resources.Site(), - resources.Site(), - ], - ), + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Site.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/sites/*}" % client.transport._host, + args[1], ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(service.ListSitesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} +def test_get_site_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_sites(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_site( + service.GetSiteRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Site) for i in results) - pages = list(client.list_sites(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_site_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - service.GetSiteRequest, + service.CreateSiteRequest, dict, ], ) -def test_get_site_rest(request_type): +def test_create_site_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["site"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "organization_contact": { + "address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone": "phone_value", + "contacts": [ + { + "given_name": "given_name_value", + "family_name": "family_name_value", + "email": "email_value", + "phone": "phone_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "reachable_times": [ + { + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time": {}, + "days": [1], + } + ], + } + ], + }, + "google_maps_pin_uri": "google_maps_pin_uri_value", + "access_times": {}, + "notes": "notes_value", + "customer_site_id": "customer_site_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateSiteRequest.meta.fields["site"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["site"][field])): + del request_init["site"][field][i][subfield] + else: + del request_init["site"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Site( - name="name_value", - display_name="display_name_value", - description="description_value", - google_maps_pin_uri="google_maps_pin_uri_value", - notes="notes_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_site(request) + response = client.create_site(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Site) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.google_maps_pin_uri == "google_maps_pin_uri_value" - assert response.notes == "notes_value" + assert response.operation.name == "operations/spam" -def test_get_site_rest_use_cached_wrapped_rpc(): +def test_create_site_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18088,33 +18921,37 @@ def test_get_site_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_site in client._transport._wrapped_methods + assert client._transport.create_site in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_site] = mock_rpc + client._transport._wrapped_methods[client._transport.create_site] = mock_rpc request = {} - client.get_site(request) + client.create_site(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_site(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): +def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18125,21 +18962,28 @@ def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_site._get_unset_required_fields(jsonified_request) + ).create_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_site._get_unset_required_fields(jsonified_request) + ).create_site._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "site_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18148,7 +18992,7 @@ def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Site() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18160,39 +19004,50 @@ def test_get_site_rest_required_fields(request_type=service.GetSiteRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_site(request) + response = client.create_site(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_site_rest_unset_required_fields(): +def test_create_site_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_site._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + unset_fields = transport.create_site._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "siteId", + ) + ) + & set( + ( + "parent", + "site", + ) + ) + ) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_site_rest_interceptors(null_interceptor): +def test_create_site_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18205,13 +19060,15 @@ def test_get_site_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_site" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_site" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_site" + transports.GDCHardwareManagementRestInterceptor, "pre_create_site" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetSiteRequest.pb(service.GetSiteRequest()) + pb_message = service.CreateSiteRequest.pb(service.CreateSiteRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18222,17 +19079,19 @@ def test_get_site_rest_interceptors(null_interceptor): req.return_value = 
Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Site.to_json(resources.Site()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetSiteRequest() + request = service.CreateSiteRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Site() + post.return_value = operations_pb2.Operation() - client.get_site( + client.create_site( request, metadata=[ ("key", "val"), @@ -18244,8 +19103,8 @@ def test_get_site_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_site_rest_bad_request( - transport: str = "rest", request_type=service.GetSiteRequest +def test_create_site_rest_bad_request( + transport: str = "rest", request_type=service.CreateSiteRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18253,7 +19112,7 @@ def test_get_site_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/sites/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18265,10 +19124,10 @@ def test_get_site_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_site(request) + client.create_site(request) -def test_get_site_rest_flattened(): +def test_create_site_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18277,39 +19136,39 @@ def test_get_site_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Site() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/sites/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Site.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_site(**mock_args) + client.create_site(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/sites/*}" % client.transport._host, + "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, args[1], ) -def test_get_site_rest_flattened_error(transport: str = "rest"): +def test_create_site_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18318,13 +19177,15 @@ def test_get_site_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_site( - service.GetSiteRequest(), - name="name_value", + client.create_site( + service.CreateSiteRequest(), + parent="parent_value", + site=resources.Site(name="name_value"), + site_id="site_id_value", ) -def test_get_site_rest_error(): +def test_create_site_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18333,20 +19194,22 @@ def test_get_site_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateSiteRequest, + service.UpdateSiteRequest, dict, ], ) -def test_create_site_rest(request_type): +def test_update_site_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } request_init["site"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/sites/sample3", "display_name": "display_name_value", "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, @@ -18393,13 +19256,14 @@ def test_create_site_rest(request_type): "google_maps_pin_uri": "google_maps_pin_uri_value", "access_times": {}, "notes": "notes_value", + "customer_site_id": "customer_site_id_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateSiteRequest.meta.fields["site"] + test_field = service.UpdateSiteRequest.meta.fields["site"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18475,13 +19339,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_site(request) + response = client.update_site(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_site_rest_use_cached_wrapped_rpc(): +def test_update_site_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18495,17 +19359,17 @@ def test_create_site_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_site in client._transport._wrapped_methods + assert client._transport.update_site in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_site] = mock_rpc + client._transport._wrapped_methods[client._transport.update_site] = mock_rpc request = {} - client.create_site(request) + client.update_site(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -18514,18 +19378,17 @@ def test_create_site_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_site(request) + client.update_site(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest): +def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18536,28 +19399,24 @@ def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_site._get_unset_required_fields(jsonified_request) + ).update_site._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_site._get_unset_required_fields(jsonified_request) + ).update_site._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( "request_id", - "site_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18578,7 +19437,7 @@ def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18591,29 +19450,29 @@ def test_create_site_rest_required_fields(request_type=service.CreateSiteRequest response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_site(request) + response = client.update_site(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_site_rest_unset_required_fields(): +def test_update_site_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_site._get_unset_required_fields({}) + unset_fields = transport.update_site._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "requestId", - "siteId", + "updateMask", ) ) & set( ( - "parent", + "updateMask", "site", ) ) @@ -18621,7 +19480,7 @@ def test_create_site_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_site_rest_interceptors(null_interceptor): +def test_update_site_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18636,13 +19495,13 @@ def test_create_site_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_site" + transports.GDCHardwareManagementRestInterceptor, "post_update_site" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_site" + transports.GDCHardwareManagementRestInterceptor, "pre_update_site" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateSiteRequest.pb(service.CreateSiteRequest()) + pb_message = service.UpdateSiteRequest.pb(service.UpdateSiteRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18657,7 +19516,7 @@ def test_create_site_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateSiteRequest() + request = service.UpdateSiteRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18665,7 +19524,7 @@ def test_create_site_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_site( + client.update_site( request, metadata=[ ("key", "val"), @@ -18677,8 +19536,8 @@ def test_create_site_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_site_rest_bad_request( - transport: str = "rest", request_type=service.CreateSiteRequest +def test_update_site_rest_bad_request( + transport: str = "rest", request_type=service.UpdateSiteRequest ): client = 
GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18686,7 +19545,9 @@ def test_create_site_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18698,10 +19559,10 @@ def test_create_site_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_site(request) + client.update_site(request) -def test_create_site_rest_flattened(): +def test_update_site_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18713,13 +19574,14 @@ def test_create_site_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", site=resources.Site(name="name_value"), - site_id="site_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -18730,195 +19592,84 @@ def test_create_site_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_site(**mock_args) + client.update_site(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/sites" % client.transport._host, + "%s/v1alpha/{site.name=projects/*/locations/*/sites/*}" + % client.transport._host, args[1], ) -def test_create_site_rest_flattened_error(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_site( - service.CreateSiteRequest(), - parent="parent_value", - site=resources.Site(name="name_value"), - site_id="site_id_value", - ) - - -def test_create_site_rest_error(): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - service.UpdateSiteRequest, - dict, - ], -) -def test_update_site_rest(request_type): +def test_update_site_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} - } - request_init["site"] = { - "name": "projects/sample1/locations/sample2/sites/sample3", - "display_name": "display_name_value", - "description": "description_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "organization_contact": { - "address": { - "revision": 879, - "region_code": "region_code_value", - "language_code": "language_code_value", - "postal_code": "postal_code_value", - "sorting_code": "sorting_code_value", - "administrative_area": "administrative_area_value", - "locality": "locality_value", - "sublocality": "sublocality_value", - "address_lines": ["address_lines_value1", "address_lines_value2"], - "recipients": ["recipients_value1", "recipients_value2"], - "organization": "organization_value", - }, - "email": "email_value", - "phone": "phone_value", - "contacts": [ - { - "given_name": "given_name_value", - "family_name": "family_name_value", - "email": "email_value", - "phone": "phone_value", - "time_zone": {"id": "id_value", "version": "version_value"}, - "reachable_times": [ - { - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "end_time": {}, - "days": [1], - } - ], - } - ], - }, - "google_maps_pin_uri": "google_maps_pin_uri_value", - "access_times": {}, - "notes": "notes_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateSiteRequest.meta.fields["site"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_site( + service.UpdateSiteRequest(), + site=resources.Site(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["site"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_update_site_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["site"][field])): - del request_init["site"][field][i][subfield] - else: - del request_init["site"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + service.ListHardwareGroupsRequest, + dict, + ], +) +def test_list_hardware_groups_rest(request_type): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_site(request) + response = client.list_hardware_groups(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListHardwareGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_site_rest_use_cached_wrapped_rpc(): +def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18932,36 +19683,39 @@ def test_update_site_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_site in client._transport._wrapped_methods + assert ( + client._transport.list_hardware_groups in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_site] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_hardware_groups + ] = mock_rpc request = {} - client.update_site(request) + client.list_hardware_groups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_site(request) + client.list_hardware_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest): +def test_list_hardware_groups_rest_required_fields( + request_type=service.ListHardwareGroupsRequest, +): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18972,24 +19726,30 @@ def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_site._get_unset_required_fields(jsonified_request) + ).list_hardware_groups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_site._get_unset_required_fields(jsonified_request) + ).list_hardware_groups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18998,7 +19758,7 @@ def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareGroupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19010,50 +19770,49 @@ def test_update_site_rest_required_fields(request_type=service.UpdateSiteRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_site(request) + response = client.list_hardware_groups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_site_rest_unset_required_fields(): +def test_list_hardware_groups_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_site._get_unset_required_fields({}) + unset_fields = transport.list_hardware_groups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "site", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_site_rest_interceptors(null_interceptor): +def test_list_hardware_groups_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19066,15 +19825,15 @@ def test_update_site_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_site" + transports.GDCHardwareManagementRestInterceptor, "post_list_hardware_groups" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_site" + transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware_groups" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateSiteRequest.pb(service.UpdateSiteRequest()) + pb_message = service.ListHardwareGroupsRequest.pb( + service.ListHardwareGroupsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19085,19 +19844,19 @@ def test_update_site_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListHardwareGroupsResponse.to_json( + service.ListHardwareGroupsResponse() ) - request = service.UpdateSiteRequest() + request = service.ListHardwareGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
operations_pb2.Operation() + post.return_value = service.ListHardwareGroupsResponse() - client.update_site( + client.list_hardware_groups( request, metadata=[ ("key", "val"), @@ -19109,8 +19868,8 @@ def test_update_site_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_site_rest_bad_request( - transport: str = "rest", request_type=service.UpdateSiteRequest +def test_list_hardware_groups_rest_bad_request( + transport: str = "rest", request_type=service.ListHardwareGroupsRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19118,9 +19877,7 @@ def test_update_site_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} - } + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19132,10 +19889,10 @@ def test_update_site_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_site(request) + client.list_hardware_groups(request) -def test_update_site_rest_flattened(): +def test_list_hardware_groups_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19144,105 +19901,168 @@ def test_update_site_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareGroupsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "site": {"name": "projects/sample1/locations/sample2/sites/sample3"} - } + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - site=resources.Site(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_site(**mock_args) + client.list_hardware_groups(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{site.name=projects/*/locations/*/sites/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" % client.transport._host, args[1], ) -def test_update_site_rest_flattened_error(transport: str = "rest"): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_site( - service.UpdateSiteRequest(), - site=resources.Site(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) +def test_list_hardware_groups_rest_flattened_error(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_hardware_groups( + service.ListHardwareGroupsRequest(), + parent="parent_value", + ) + + +def test_list_hardware_groups_rest_pager(transport: str = "rest"): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + next_page_token="abc", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[], + next_page_token="def", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + ], + next_page_token="ghi", + ), + service.ListHardwareGroupsResponse( + hardware_groups=[ + resources.HardwareGroup(), + resources.HardwareGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListHardwareGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + pager = client.list_hardware_groups(request=sample_request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.HardwareGroup) for i in results) -def test_update_site_rest_error(): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.list_hardware_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - service.ListHardwareGroupsRequest, + service.GetHardwareGroupRequest, dict, ], ) -def test_list_hardware_groups_rest(request_type): +def test_get_hardware_group_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListHardwareGroupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.HardwareGroup( + name="name_value", + hardware_count=1494, + site="site_value", + state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareGroupsResponse.pb(return_value) + return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware_groups(request) + response = client.get_hardware_group(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHardwareGroupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.HardwareGroup) + assert response.name == "name_value" + assert response.hardware_count == 1494 + assert response.site == "site_value" + assert response.state == resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED + assert response.zone == "zone_value" -def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): +def test_get_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19257,7 +20077,7 @@ def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_hardware_groups in client._transport._wrapped_methods + client._transport.get_hardware_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -19266,29 +20086,29 @@ def test_list_hardware_groups_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_hardware_groups + client._transport.get_hardware_group ] = mock_rpc request = {} - client.list_hardware_groups(request) + client.get_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_hardware_groups(request) + client.get_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_hardware_groups_rest_required_fields( - request_type=service.ListHardwareGroupsRequest, +def test_get_hardware_group_rest_required_fields( + request_type=service.GetHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19299,30 +20119,21 @@ def test_list_hardware_groups_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware_groups._get_unset_required_fields(jsonified_request) + ).get_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware_groups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19331,7 +20142,7 @@ def test_list_hardware_groups_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListHardwareGroupsResponse() + return_value = resources.HardwareGroup() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19352,40 +20163,30 @@ def test_list_hardware_groups_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareGroupsResponse.pb(return_value) + return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware_groups(request) + response = client.get_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_hardware_groups_rest_unset_required_fields(): +def test_get_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_hardware_groups._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_hardware_groups_rest_interceptors(null_interceptor): +def test_get_hardware_group_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19398,14 +20199,14 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_hardware_groups" + transports.GDCHardwareManagementRestInterceptor, "post_get_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware_groups" + transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListHardwareGroupsRequest.pb( - service.ListHardwareGroupsRequest() + pb_message = service.GetHardwareGroupRequest.pb( + service.GetHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -19417,19 +20218,19 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListHardwareGroupsResponse.to_json( - service.ListHardwareGroupsResponse() + req.return_value._content = resources.HardwareGroup.to_json( + resources.HardwareGroup() ) - request = service.ListHardwareGroupsRequest() + request = service.GetHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListHardwareGroupsResponse() + post.return_value = resources.HardwareGroup() - client.list_hardware_groups( + client.get_hardware_group( request, metadata=[ ("key", "val"), @@ -19441,8 +20242,8 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_hardware_groups_rest_bad_request( - transport: str = "rest", request_type=service.ListHardwareGroupsRequest +def 
test_get_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.GetHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19450,7 +20251,9 @@ def test_list_hardware_groups_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19462,10 +20265,10 @@ def test_list_hardware_groups_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_hardware_groups(request) + client.get_hardware_group(request) -def test_list_hardware_groups_rest_flattened(): +def test_get_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19474,14 +20277,16 @@ def test_list_hardware_groups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListHardwareGroupsResponse() + return_value = resources.HardwareGroup() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -19489,25 +20294,25 @@ def test_list_hardware_groups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareGroupsResponse.pb(return_value) + return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_hardware_groups(**mock_args) + client.get_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" % client.transport._host, args[1], ) -def test_list_hardware_groups_rest_flattened_error(transport: str = "rest"): +def test_get_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19516,126 +20321,137 @@ def test_list_hardware_groups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_hardware_groups( - service.ListHardwareGroupsRequest(), - parent="parent_value", + client.get_hardware_group( + service.GetHardwareGroupRequest(), + name="name_value", ) -def test_list_hardware_groups_rest_pager(transport: str = "rest"): +def test_get_hardware_group_rest_error(): + client = GDCHardwareManagementClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateHardwareGroupRequest, + dict, + ], +) +def test_create_hardware_group_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListHardwareGroupsResponse( - hardware_groups=[ - resources.HardwareGroup(), - resources.HardwareGroup(), - resources.HardwareGroup(), - ], - next_page_token="abc", - ), - service.ListHardwareGroupsResponse( - hardware_groups=[], - next_page_token="def", - ), - service.ListHardwareGroupsResponse( - hardware_groups=[ - resources.HardwareGroup(), - ], - next_page_token="ghi", - ), - service.ListHardwareGroupsResponse( - hardware_groups=[ - resources.HardwareGroup(), - resources.HardwareGroup(), - ], - ), - ) - # Two responses for two calls - response = response + response + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init["hardware_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "hardware_count": 1494, + "config": { + "sku": "sku_value", + "power_supply": 1, + "subscription_duration_months": 3042, + }, + "site": "site_value", + "state": 1, + "zone": "zone_value", + "requested_installation_date": {"year": 433, "month": 550, "day": 318}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the values into proper Response objs - response = tuple( - service.ListHardwareGroupsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateHardwareGroupRequest.meta.fields["hardware_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - pager = client.list_hardware_groups(request=sample_request) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.HardwareGroup) for i in results) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - pages = list(client.list_hardware_groups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - service.GetHardwareGroupRequest, - dict, - ], -) -def test_get_hardware_group_rest(request_type): - client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware_group"][field])): + del request_init["hardware_group"][field][i][subfield] + else: + del request_init["hardware_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.HardwareGroup( - name="name_value", - hardware_count=1494, - site="site_value", - state=resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED, - zone="zone_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware_group(request) + response = client.create_hardware_group(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.HardwareGroup) - assert response.name == "name_value" - assert response.hardware_count == 1494 - assert response.site == "site_value" - assert response.state == resources.HardwareGroup.State.ADDITIONAL_INFO_NEEDED - assert response.zone == "zone_value" + assert response.operation.name == "operations/spam" -def test_get_hardware_group_rest_use_cached_wrapped_rpc(): +def test_create_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19650,7 +20466,8 @@ def test_get_hardware_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_hardware_group in client._transport._wrapped_methods + client._transport.create_hardware_group + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -19659,29 +20476,33 @@ def test_get_hardware_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_hardware_group + client._transport.create_hardware_group ] = mock_rpc request = {} - client.get_hardware_group(request) + client.create_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_hardware_group(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_hardware_group_rest_required_fields( - request_type=service.GetHardwareGroupRequest, +def test_create_hardware_group_rest_required_fields( + request_type=service.CreateHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19692,21 +20513,28 @@ def test_get_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware_group._get_unset_required_fields(jsonified_request) + ).create_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware_group._get_unset_required_fields(jsonified_request) + ).create_hardware_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "hardware_group_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19715,7 +20543,7 @@ def test_get_hardware_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.HardwareGroup() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19727,39 +20555,50 @@ def test_get_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware_group(request) + response = client.create_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_hardware_group_rest_unset_required_fields(): +def test_create_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_hardware_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "hardwareGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "hardwareGroup", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_hardware_group_rest_interceptors(null_interceptor): +def test_create_hardware_group_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19772,14 +20611,16 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_hardware_group" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetHardwareGroupRequest.pb( - service.GetHardwareGroupRequest() + pb_message = service.CreateHardwareGroupRequest.pb( + service.CreateHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -19791,19 +20632,19 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.HardwareGroup.to_json( - resources.HardwareGroup() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = service.GetHardwareGroupRequest() + request = service.CreateHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.HardwareGroup() + post.return_value = operations_pb2.Operation() - client.get_hardware_group( + client.create_hardware_group( request, metadata=[ ("key", "val"), @@ -19815,8 
+20656,8 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.GetHardwareGroupRequest +def test_create_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.CreateHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19824,9 +20665,7 @@ def test_get_hardware_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19838,10 +20677,10 @@ def test_get_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_hardware_group(request) + client.create_hardware_group(request) -def test_get_hardware_group_rest_flattened(): +def test_create_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19850,42 +20689,40 @@ def test_get_hardware_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.HardwareGroup() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.HardwareGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_hardware_group(**mock_args) + client.create_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" % client.transport._host, args[1], ) -def test_get_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_create_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19894,13 +20731,15 @@ def test_get_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_hardware_group( - service.GetHardwareGroupRequest(), - name="name_value", + client.create_hardware_group( + service.CreateHardwareGroupRequest(), + parent="parent_value", + hardware_group=resources.HardwareGroup(name="name_value"), + hardware_group_id="hardware_group_id_value", ) -def test_get_hardware_group_rest_error(): +def test_create_hardware_group_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19909,20 +20748,24 @@ def test_get_hardware_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateHardwareGroupRequest, + service.UpdateHardwareGroupRequest, dict, ], ) -def test_create_hardware_group_rest(request_type): +def test_update_hardware_group_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } request_init["hardware_group"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "labels": {}, @@ -19942,7 +20785,7 @@ def test_create_hardware_group_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateHardwareGroupRequest.meta.fields["hardware_group"] + test_field = service.UpdateHardwareGroupRequest.meta.fields["hardware_group"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20018,13 +20861,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware_group(request) + response = client.update_hardware_group(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_hardware_group_rest_use_cached_wrapped_rpc(): +def test_update_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20039,7 +20882,7 @@ def test_create_hardware_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_hardware_group + client._transport.update_hardware_group in client._transport._wrapped_methods ) @@ -20049,11 +20892,11 @@ def test_create_hardware_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_hardware_group + client._transport.update_hardware_group ] = mock_rpc request = {} - client.create_hardware_group(request) + client.update_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -20062,20 +20905,19 @@ def test_create_hardware_group_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_hardware_group(request) + client.update_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_hardware_group_rest_required_fields( - request_type=service.CreateHardwareGroupRequest, +def test_update_hardware_group_rest_required_fields( + request_type=service.UpdateHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20086,28 +20928,24 @@ def test_create_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware_group._get_unset_required_fields(jsonified_request) + ).update_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware_group._get_unset_required_fields(jsonified_request) + ).update_hardware_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "hardware_group_id", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20128,7 +20966,7 @@ def test_create_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -20141,29 +20979,29 @@ def test_create_hardware_group_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware_group(request) + response = client.update_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_hardware_group_rest_unset_required_fields(): +def test_update_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_hardware_group._get_unset_required_fields({}) + unset_fields = transport.update_hardware_group._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "hardwareGroupId", "requestId", + "updateMask", ) ) & set( ( - "parent", + "updateMask", "hardwareGroup", ) ) @@ -20171,7 +21009,7 @@ def test_create_hardware_group_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_hardware_group_rest_interceptors(null_interceptor): +def test_update_hardware_group_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20186,14 +21024,14 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "post_update_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateHardwareGroupRequest.pb( - service.CreateHardwareGroupRequest() + pb_message = service.UpdateHardwareGroupRequest.pb( + service.UpdateHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -20209,7 +21047,7 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateHardwareGroupRequest() + request = service.UpdateHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20217,7 +21055,7 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_hardware_group( + client.update_hardware_group( request, metadata=[ ("key", "val"), @@ -20229,8 +21067,8 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.CreateHardwareGroupRequest +def test_update_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.UpdateHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20238,7 +21076,11 @@ def test_create_hardware_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -20250,10 +21092,10 @@ def test_create_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_hardware_group(request) + client.update_hardware_group(request) -def test_create_hardware_group_rest_flattened(): +def test_update_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20265,13 +21107,16 @@ def test_create_hardware_group_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "hardware_group": { + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", hardware_group=resources.HardwareGroup(name="name_value"), - hardware_group_id="hardware_group_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -20282,20 +21127,20 @@ def test_create_hardware_group_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_hardware_group(**mock_args) + client.update_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/hardwareGroups" + "%s/v1alpha/{hardware_group.name=projects/*/locations/*/orders/*/hardwareGroups/*}" % client.transport._host, args[1], ) -def test_create_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_update_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20304,15 +21149,14 @@ def test_create_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_hardware_group( - service.CreateHardwareGroupRequest(), - parent="parent_value", + client.update_hardware_group( + service.UpdateHardwareGroupRequest(), hardware_group=resources.HardwareGroup(name="name_value"), - hardware_group_id="hardware_group_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_hardware_group_rest_error(): +def test_update_hardware_group_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20321,11 +21165,11 @@ def test_create_hardware_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateHardwareGroupRequest, + service.DeleteHardwareGroupRequest, dict, ], ) -def test_update_hardware_group_rest(request_type): +def test_delete_hardware_group_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20333,93 +21177,8 @@ def test_update_hardware_group_rest(request_type): # send a request that will satisfy transcoding request_init = { - "hardware_group": { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } - } - request_init["hardware_group"] = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "hardware_count": 1494, - "config": { - "sku": "sku_value", - "power_supply": 1, - "subscription_duration_months": 3042, - }, - "site": "site_value", - "state": 1, - "zone": "zone_value", - "requested_installation_date": {"year": 433, "month": 550, "day": 318}, + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateHardwareGroupRequest.meta.fields["hardware_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["hardware_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["hardware_group"][field])): - del request_init["hardware_group"][field][i][subfield] - else: - del request_init["hardware_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20434,13 +21193,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware_group(request) + response = client.delete_hardware_group(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_hardware_group_rest_use_cached_wrapped_rpc(): +def test_delete_hardware_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20455,7 +21214,7 @@ def test_update_hardware_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_hardware_group + client._transport.delete_hardware_group in client._transport._wrapped_methods ) @@ -20465,11 +21224,11 @@ def test_update_hardware_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_hardware_group + client._transport.delete_hardware_group ] = mock_rpc request = {} - client.update_hardware_group(request) + client.delete_hardware_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -20478,19 +21237,20 @@ def test_update_hardware_group_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_hardware_group(request) + client.delete_hardware_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_hardware_group_rest_required_fields( - request_type=service.UpdateHardwareGroupRequest, +def test_delete_hardware_group_rest_required_fields( + request_type=service.DeleteHardwareGroupRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20501,24 +21261,23 @@ def test_update_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware_group._get_unset_required_fields(jsonified_request) + ).delete_hardware_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware_group._get_unset_required_fields(jsonified_request) + ).delete_hardware_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20539,10 +21298,9 @@ def test_update_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -20552,37 +21310,24 @@ def test_update_hardware_group_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware_group(request) + response = client.delete_hardware_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_hardware_group_rest_unset_required_fields(): +def test_delete_hardware_group_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_hardware_group._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "hardwareGroup", - ) - ) - ) + unset_fields = transport.delete_hardware_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_hardware_group_rest_interceptors(null_interceptor): +def 
test_delete_hardware_group_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20597,14 +21342,14 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware_group" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateHardwareGroupRequest.pb( - service.UpdateHardwareGroupRequest() + pb_message = service.DeleteHardwareGroupRequest.pb( + service.DeleteHardwareGroupRequest() ) transcode.return_value = { "method": "post", @@ -20620,7 +21365,7 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateHardwareGroupRequest() + request = service.DeleteHardwareGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20628,7 +21373,7 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_hardware_group( + client.delete_hardware_group( request, metadata=[ ("key", "val"), @@ -20640,8 +21385,8 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.UpdateHardwareGroupRequest +def test_delete_hardware_group_rest_bad_request( + transport: str = "rest", request_type=service.DeleteHardwareGroupRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20650,9 +21395,7 @@ def test_update_hardware_group_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "hardware_group": { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" } request = request_type(**request_init) @@ -20665,10 +21408,10 @@ def test_update_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_hardware_group(request) + client.delete_hardware_group(request) -def test_update_hardware_group_rest_flattened(): +def test_delete_hardware_group_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20681,15 +21424,12 @@ def test_update_hardware_group_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "hardware_group": { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" } # get truthy value for each flattened field mock_args = dict( - hardware_group=resources.HardwareGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -20700,20 +21440,20 @@ def test_update_hardware_group_rest_flattened(): 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_hardware_group(**mock_args) + client.delete_hardware_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{hardware_group.name=projects/*/locations/*/orders/*/hardwareGroups/*}" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" % client.transport._host, args[1], ) -def test_update_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_delete_hardware_group_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20722,14 +21462,13 @@ def test_update_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_hardware_group( - service.UpdateHardwareGroupRequest(), - hardware_group=resources.HardwareGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_hardware_group( + service.DeleteHardwareGroupRequest(), + name="name_value", ) -def test_update_hardware_group_rest_error(): +def test_delete_hardware_group_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20738,41 +21477,46 @@ def test_update_hardware_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteHardwareGroupRequest, + service.ListHardwareRequest, dict, ], ) -def test_delete_hardware_group_rest(request_type): +def test_list_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware_group(request) + response = client.list_hardware(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListHardwarePager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_delete_hardware_group_rest_use_cached_wrapped_rpc(): +def test_list_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20786,44 +21530,33 @@ def test_delete_hardware_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_hardware_group - in client._transport._wrapped_methods - ) + assert client._transport.list_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_hardware_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_hardware] = mock_rpc request = {} - client.delete_hardware_group(request) + client.list_hardware(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_hardware_group(request) + client.list_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_hardware_group_rest_required_fields( - request_type=service.DeleteHardwareGroupRequest, -): +def test_list_hardware_rest_required_fields(request_type=service.ListHardwareRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20834,23 +21567,30 @@ def test_delete_hardware_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware_group._get_unset_required_fields(jsonified_request) + ).list_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware_group._get_unset_required_fields(jsonified_request) + ).list_hardware._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20859,7 +21599,7 @@ def test_delete_hardware_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20871,36 +21611,49 @@ def test_delete_hardware_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListHardwareResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware_group(request) + response = client.list_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_hardware_group_rest_unset_required_fields(): +def test_list_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_hardware_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_hardware._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_hardware_group_rest_interceptors(null_interceptor): +def test_list_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20913,17 +21666,13 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "post_list_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware_group" + transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteHardwareGroupRequest.pb( - service.DeleteHardwareGroupRequest() - ) + pb_message = 
service.ListHardwareRequest.pb(service.ListHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20934,19 +21683,19 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListHardwareResponse.to_json( + service.ListHardwareResponse() ) - request = service.DeleteHardwareGroupRequest() + request = service.ListHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListHardwareResponse() - client.delete_hardware_group( + client.list_hardware( request, metadata=[ ("key", "val"), @@ -20958,8 +21707,8 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_hardware_group_rest_bad_request( - transport: str = "rest", request_type=service.DeleteHardwareGroupRequest +def test_list_hardware_rest_bad_request( + transport: str = "rest", request_type=service.ListHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20967,9 +21716,7 @@ def test_delete_hardware_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20981,10 +21728,10 @@ def test_delete_hardware_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_hardware_group(request) + client.list_hardware(request) -def test_delete_hardware_group_rest_flattened(): +def test_list_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20993,40 +21740,40 @@ def test_delete_hardware_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListHardwareResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/orders/sample3/hardwareGroups/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListHardwareResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_hardware_group(**mock_args) + client.list_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*/hardwareGroups/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/hardware" % client.transport._host, args[1], ) -def test_delete_hardware_group_rest_flattened_error(transport: str = "rest"): +def test_list_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21035,61 +21782,128 @@ def test_delete_hardware_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_hardware_group( - service.DeleteHardwareGroupRequest(), - name="name_value", + client.list_hardware( + service.ListHardwareRequest(), + parent="parent_value", ) -def test_delete_hardware_group_rest_error(): +def test_list_hardware_rest_pager(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + resources.Hardware(), + ], + next_page_token="abc", + ), + service.ListHardwareResponse( + hardware=[], + next_page_token="def", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + ], + next_page_token="ghi", + ), + service.ListHardwareResponse( + hardware=[ + resources.Hardware(), + resources.Hardware(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListHardwareResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_hardware(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Hardware) for i in results) + + pages = list(client.list_hardware(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListHardwareRequest, + service.GetHardwareRequest, dict, ], ) -def test_list_hardware_rest(request_type): +def test_get_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListHardwareResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Hardware( + name="name_value", + display_name="display_name_value", + order="order_value", + hardware_group="hardware_group_value", + site="site_value", + state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, + ciq_uri="ciq_uri_value", + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareResponse.pb(return_value) + return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware(request) + response = client.get_hardware(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHardwarePager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Hardware) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.order == "order_value" + assert response.hardware_group == "hardware_group_value" + assert response.site == "site_value" + assert response.state == resources.Hardware.State.ADDITIONAL_INFO_NEEDED + assert response.ciq_uri == "ciq_uri_value" + assert response.zone == "zone_value" -def test_list_hardware_rest_use_cached_wrapped_rpc(): +def test_get_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21103,33 +21917,33 @@ def test_list_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_hardware in client._transport._wrapped_methods + assert client._transport.get_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.get_hardware] = mock_rpc request = {} - client.list_hardware(request) + client.get_hardware(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_hardware(request) + client.get_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_hardware_rest_required_fields(request_type=service.ListHardwareRequest): +def test_get_hardware_rest_required_fields(request_type=service.GetHardwareRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21140,30 +21954,21 @@ def test_list_hardware_rest_required_fields(request_type=service.ListHardwareReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware._get_unset_required_fields(jsonified_request) + ).get_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hardware._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21172,7 +21977,7 @@ def test_list_hardware_rest_required_fields(request_type=service.ListHardwareReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListHardwareResponse() + return_value = resources.Hardware() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21193,40 +21998,30 @@ def test_list_hardware_rest_required_fields(request_type=service.ListHardwareReq response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareResponse.pb(return_value) + return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hardware(request) + response = client.get_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_hardware_rest_unset_required_fields(): +def test_get_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_hardware._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_hardware._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_hardware_rest_interceptors(null_interceptor): +def test_get_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21239,13 +22034,13 @@ def test_list_hardware_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_get_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListHardwareRequest.pb(service.ListHardwareRequest()) + pb_message = service.GetHardwareRequest.pb(service.GetHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21256,19 +22051,17 @@ def test_list_hardware_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListHardwareResponse.to_json( - service.ListHardwareResponse() - ) + req.return_value._content = resources.Hardware.to_json(resources.Hardware()) - request = service.ListHardwareRequest() + request = service.GetHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListHardwareResponse() + post.return_value = resources.Hardware() - client.list_hardware( + client.get_hardware( request, metadata=[ ("key", "val"), @@ -21280,8 +22073,8 @@ def test_list_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_hardware_rest_bad_request( - transport: str = "rest", request_type=service.ListHardwareRequest +def test_get_hardware_rest_bad_request( + transport: str = "rest", request_type=service.GetHardwareRequest ): client = GDCHardwareManagementClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -21289,7 +22082,7 @@ def test_list_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21301,10 +22094,10 @@ def test_list_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_hardware(request) + client.get_hardware(request) -def test_list_hardware_rest_flattened(): +def test_get_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21313,14 +22106,14 @@ def test_list_hardware_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListHardwareResponse() + return_value = resources.Hardware() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -21328,25 +22121,25 @@ def test_list_hardware_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListHardwareResponse.pb(return_value) + return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_hardware(**mock_args) + client.get_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/hardware" + "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" % client.transport._host, args[1], ) -def test_list_hardware_rest_flattened_error(transport: str = "rest"): +def test_get_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21355,128 +22148,160 @@ def test_list_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_hardware( - service.ListHardwareRequest(), - parent="parent_value", + client.get_hardware( + service.GetHardwareRequest(), + name="name_value", ) -def test_list_hardware_rest_pager(transport: str = "rest"): +def test_get_hardware_rest_error(): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListHardwareResponse( - hardware=[ - resources.Hardware(), - resources.Hardware(), - resources.Hardware(), - ], - next_page_token="abc", - ), - service.ListHardwareResponse( - hardware=[], - next_page_token="def", - ), - service.ListHardwareResponse( - hardware=[ - resources.Hardware(), - ], - next_page_token="ghi", - ), - service.ListHardwareResponse( - hardware=[ - resources.Hardware(), - resources.Hardware(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListHardwareResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_hardware(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Hardware) for i in results) - - pages = list(client.list_hardware(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - service.GetHardwareRequest, + service.CreateHardwareRequest, dict, ], ) -def test_get_hardware_rest(request_type): +def test_create_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["hardware"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "order": "order_value", + "hardware_group": "hardware_group_value", + "site": "site_value", + "state": 1, + "ciq_uri": "ciq_uri_value", + "config": { + "sku": "sku_value", + "power_supply": 1, + "subscription_duration_months": 3042, + }, + "estimated_installation_date": {"year": 433, "month": 550, "day": 318}, + "physical_info": { + "power_receptacle": 1, + "network_uplink": 1, + "voltage": 1, + "amperes": 1, + }, + "installation_info": { + "rack_location": "rack_location_value", + "power_distance_meters": 2246, + "switch_distance_meters": 2347, + "rack_unit_dimensions": { + "width_inches": 0.1273, + "height_inches": 0.13620000000000002, + "depth_inches": 0.1262, + }, + "rack_space": {"start_rack_unit": 1613, "end_rack_unit": 1366}, + "rack_type": 1, + }, + "zone": "zone_value", + "requested_installation_date": {}, + "actual_installation_date": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateHardwareRequest.meta.fields["hardware"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hardware"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hardware"][field])): + del request_init["hardware"][field][i][subfield] + else: + del request_init["hardware"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Hardware( - name="name_value", - display_name="display_name_value", - order="order_value", - hardware_group="hardware_group_value", - site="site_value", - state=resources.Hardware.State.ADDITIONAL_INFO_NEEDED, - ciq_uri="ciq_uri_value", - zone="zone_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware(request) + response = client.create_hardware(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Hardware) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.order == "order_value" - assert response.hardware_group == "hardware_group_value" - assert response.site == "site_value" - assert response.state == resources.Hardware.State.ADDITIONAL_INFO_NEEDED - assert response.ciq_uri == "ciq_uri_value" - assert response.zone == "zone_value" + assert response.operation.name == "operations/spam" -def test_get_hardware_rest_use_cached_wrapped_rpc(): +def test_create_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21490,33 +22315,39 @@ def test_get_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_hardware in client._transport._wrapped_methods + assert client._transport.create_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.create_hardware] = mock_rpc request = {} - client.get_hardware(request) + client.create_hardware(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_hardware(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_hardware_rest_required_fields(request_type=service.GetHardwareRequest): +def test_create_hardware_rest_required_fields( + request_type=service.CreateHardwareRequest, +): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21527,21 +22358,23 @@ def test_get_hardware_rest_required_fields(request_type=service.GetHardwareReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware._get_unset_required_fields(jsonified_request) + ).create_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hardware._get_unset_required_fields(jsonified_request) + ).create_hardware._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("hardware_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21550,7 +22383,7 @@ def test_get_hardware_rest_required_fields(request_type=service.GetHardwareReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Hardware() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
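The *_use_cached_wrapped_rpc tests above check that a transport wraps each RPC once, at construction time, and reuses the cached wrapper on later calls. A rough sketch of that caching pattern, with invented names standing in for the generated transport and the real wrap_method machinery:

# Sketch only: wrap each RPC once and reuse the cached wrapper on every call.
import functools

def wrap_method(func):
    # Stand-in for the real wrapper: count invocations of the wrapped callable.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        wrapper.calls += 1
        return func(*args, **kwargs)
    wrapper.calls = 0
    return wrapper

class FakeTransport:
    def __init__(self):
        # Built once; per-call code looks the wrapper up instead of re-wrapping.
        self._wrapped_methods = {"create_hardware": wrap_method(self.create_hardware)}

    def create_hardware(self, request):
        return {"name": "operations/spam"}

transport = FakeTransport()
rpc = transport._wrapped_methods["create_hardware"]
rpc({})
rpc({})
assert rpc.calls == 2  # the same cached wrapper served both calls
assert len(transport._wrapped_methods) == 1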
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21562,39 +22395,45 @@ def test_get_hardware_rest_required_fields(request_type=service.GetHardwareReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hardware(request) + response = client.create_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_hardware_rest_unset_required_fields(): +def test_create_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_hardware._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_hardware._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("hardwareId",)) + & set( + ( + "parent", + "hardware", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_hardware_rest_interceptors(null_interceptor): +def test_create_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21607,13 +22446,15 @@ def test_get_hardware_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_hardware" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetHardwareRequest.pb(service.GetHardwareRequest()) + pb_message = service.CreateHardwareRequest.pb(service.CreateHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21624,17 +22465,19 @@ def test_get_hardware_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Hardware.to_json(resources.Hardware()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetHardwareRequest() + request = service.CreateHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Hardware() + post.return_value = operations_pb2.Operation() - client.get_hardware( + client.create_hardware( request, metadata=[ ("key", "val"), @@ -21646,8 +22489,8 @@ def test_get_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_hardware_rest_bad_request( - 
transport: str = "rest", request_type=service.GetHardwareRequest +def test_create_hardware_rest_bad_request( + transport: str = "rest", request_type=service.CreateHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21655,7 +22498,7 @@ def test_get_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21667,10 +22510,10 @@ def test_get_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_hardware(request) + client.create_hardware(request) -def test_get_hardware_rest_flattened(): +def test_create_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21679,40 +22522,40 @@ def test_get_hardware_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Hardware() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Hardware.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_hardware(**mock_args) + client.create_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/hardware" % client.transport._host, args[1], ) -def test_get_hardware_rest_flattened_error(transport: str = "rest"): +def test_create_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21721,13 +22564,15 @@ def test_get_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_hardware( - service.GetHardwareRequest(), - name="name_value", + client.create_hardware( + service.CreateHardwareRequest(), + parent="parent_value", + hardware=resources.Hardware(name="name_value"), + hardware_id="hardware_id_value", ) -def test_get_hardware_rest_error(): +def test_create_hardware_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21736,20 +22581,22 @@ def test_get_hardware_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateHardwareRequest, + service.UpdateHardwareRequest, dict, ], ) -def test_create_hardware_rest(request_type): +def test_update_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } request_init["hardware"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/hardware/sample3", "display_name": "display_name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -21792,7 +22639,7 @@ def test_create_hardware_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateHardwareRequest.meta.fields["hardware"] + test_field = service.UpdateHardwareRequest.meta.fields["hardware"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21868,13 +22715,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware(request) + response = client.update_hardware(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_hardware_rest_use_cached_wrapped_rpc(): +def test_update_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21888,17 +22735,17 @@ def test_create_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_hardware in client._transport._wrapped_methods + assert client._transport.update_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.update_hardware] = mock_rpc request = {} - client.create_hardware(request) + client.update_hardware(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -21907,20 +22754,19 @@ def test_create_hardware_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_hardware(request) + client.update_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_hardware_rest_required_fields( - request_type=service.CreateHardwareRequest, +def test_update_hardware_rest_required_fields( + request_type=service.UpdateHardwareRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21931,23 +22777,24 @@ def test_create_hardware_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware._get_unset_required_fields(jsonified_request) + ).update_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hardware._get_unset_required_fields(jsonified_request) + ).update_hardware._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("hardware_id",)) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21968,7 +22815,7 @@ def test_create_hardware_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21981,24 +22828,29 @@ def test_create_hardware_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hardware(request) + response = client.update_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_hardware_rest_unset_required_fields(): +def test_update_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_hardware._get_unset_required_fields({}) + unset_fields = transport.update_hardware._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("hardwareId",)) + set( + ( + "requestId", + "updateMask", + ) + ) & set( ( - "parent", + "updateMask", "hardware", ) ) @@ -22006,7 +22858,7 @@ def test_create_hardware_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_hardware_rest_interceptors(null_interceptor): +def test_update_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22021,13 +22873,13 @@ def 
test_create_hardware_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_update_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateHardwareRequest.pb(service.CreateHardwareRequest()) + pb_message = service.UpdateHardwareRequest.pb(service.UpdateHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22042,7 +22894,7 @@ def test_create_hardware_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateHardwareRequest() + request = service.UpdateHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22050,7 +22902,7 @@ def test_create_hardware_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_hardware( + client.update_hardware( request, metadata=[ ("key", "val"), @@ -22062,8 +22914,8 @@ def test_create_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_hardware_rest_bad_request( - transport: str = "rest", request_type=service.CreateHardwareRequest +def test_update_hardware_rest_bad_request( + transport: str = "rest", request_type=service.UpdateHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22071,7 +22923,9 @@ def test_create_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22083,10 +22937,10 @@ def test_create_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_hardware(request) + client.update_hardware(request) -def test_create_hardware_rest_flattened(): +def test_update_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22098,13 +22952,14 @@ def test_create_hardware_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", hardware=resources.Hardware(name="name_value"), - hardware_id="hardware_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -22115,37 +22970,36 @@ def test_create_hardware_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_hardware(**mock_args) + client.update_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
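The *_rest_interceptors tests exercise a pre hook that may rewrite the outgoing request and metadata and a post hook that may rewrite the response, wrapped around the underlying HTTP call. A compact sketch of that flow, with invented hook and helper names:

# Sketch only: run pre/post interceptor hooks around a REST call.
class ExampleInterceptor:
    def pre_update_hardware(self, request, metadata):
        # A pre hook may adjust the request or append metadata.
        return request, list(metadata) + [("x-example", "pre")]

    def post_update_hardware(self, response):
        # A post hook may adjust the response before it reaches the caller.
        response["intercepted"] = True
        return response

def call_update_hardware(request, metadata, interceptor, send):
    request, metadata = interceptor.pre_update_hardware(request, metadata)
    response = send(request, metadata)
    return interceptor.post_update_hardware(response)

response = call_update_hardware(
    {"hardware": {"name": "projects/p/locations/l/hardware/h"}},
    [("key", "val")],
    ExampleInterceptor(),
    send=lambda req, md: {"name": "operations/spam"},
)
assert response == {"name": "operations/spam", "intercepted": True}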
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/hardware" + "%s/v1alpha/{hardware.name=projects/*/locations/*/hardware/*}" % client.transport._host, args[1], ) -def test_create_hardware_rest_flattened_error(transport: str = "rest"): +def test_update_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_hardware( - service.CreateHardwareRequest(), - parent="parent_value", + # fields is an error. + with pytest.raises(ValueError): + client.update_hardware( + service.UpdateHardwareRequest(), hardware=resources.Hardware(name="name_value"), - hardware_id="hardware_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_hardware_rest_error(): +def test_update_hardware_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22154,126 +23008,18 @@ def test_create_hardware_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateHardwareRequest, + service.DeleteHardwareRequest, dict, ], ) -def test_update_hardware_rest(request_type): +def test_delete_hardware_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} - } - request_init["hardware"] = { - "name": "projects/sample1/locations/sample2/hardware/sample3", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "order": "order_value", - "hardware_group": "hardware_group_value", - "site": "site_value", - "state": 1, - "ciq_uri": "ciq_uri_value", - "config": { - "sku": "sku_value", - "power_supply": 1, - "subscription_duration_months": 3042, - }, - "estimated_installation_date": {"year": 433, "month": 550, "day": 318}, - "physical_info": { - "power_receptacle": 1, - "network_uplink": 1, - "voltage": 1, - "amperes": 1, - }, - "installation_info": { - "rack_location": "rack_location_value", - "power_distance_meters": 2246, - "switch_distance_meters": 2347, - "rack_unit_dimensions": { - "width_inches": 0.1273, - "height_inches": 0.13620000000000002, - "depth_inches": 0.1262, - }, - "rack_space": {"start_rack_unit": 1613, "end_rack_unit": 1366}, - "rack_type": 1, - }, - "zone": "zone_value", - "requested_installation_date": {}, - "actual_installation_date": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateHardwareRequest.meta.fields["hardware"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["hardware"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["hardware"][field])): - del request_init["hardware"][field][i][subfield] - else: - del request_init["hardware"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22288,13 +23034,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware(request) + response = client.delete_hardware(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_hardware_rest_use_cached_wrapped_rpc(): +def test_delete_hardware_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22308,17 +23054,17 @@ def test_update_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_hardware in client._transport._wrapped_methods + assert client._transport.delete_hardware in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_hardware] = mock_rpc request = {} - client.update_hardware(request) + client.delete_hardware(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -22327,19 +23073,20 @@ def test_update_hardware_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_hardware(request) + client.delete_hardware(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_hardware_rest_required_fields( - request_type=service.UpdateHardwareRequest, +def test_delete_hardware_rest_required_fields( + request_type=service.DeleteHardwareRequest, ): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22350,24 +23097,23 @@ def test_update_hardware_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware._get_unset_required_fields(jsonified_request) + ).delete_hardware._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_hardware._get_unset_required_fields(jsonified_request) + ).delete_hardware._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
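The *_required_fields and *_unset_required_fields tests keep checking the same invariant: of the fields a request type marks as required, only the ones the caller has not populated should be reported as unset, and optional query parameters must not leak into that set. A toy version of that check, with field names invented for the example:

# Sketch only: compute which required fields are still missing from a request.
required_fields = {"name"}
optional_query_params = {"request_id"}

def unset_required_fields(request: dict) -> set:
    return required_fields - set(request)

assert unset_required_fields({}) == {"name"}
assert unset_required_fields({"name": "projects/p/locations/l/hardware/h"}) == set()
# Optional parameters never show up as unset *required* fields.
assert not unset_required_fields({"request_id": "abc123"}) & optional_query_params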
- assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22388,10 +23134,9 @@ def test_update_hardware_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22401,37 +23146,24 @@ def test_update_hardware_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_hardware(request) + response = client.delete_hardware(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_hardware_rest_unset_required_fields(): +def test_delete_hardware_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_hardware._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "hardware", - ) - ) - ) + unset_fields = transport.delete_hardware._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_hardware_rest_interceptors(null_interceptor): +def test_delete_hardware_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22446,13 +23178,13 @@ def test_update_hardware_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_update_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateHardwareRequest.pb(service.UpdateHardwareRequest()) + pb_message = service.DeleteHardwareRequest.pb(service.DeleteHardwareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22467,7 +23199,7 @@ def test_update_hardware_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateHardwareRequest() + request = service.DeleteHardwareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22475,7 +23207,7 @@ def test_update_hardware_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_hardware( + client.delete_hardware( request, metadata=[ ("key", "val"), @@ -22487,8 +23219,8 @@ def test_update_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_hardware_rest_bad_request( - transport: str = "rest", 
request_type=service.UpdateHardwareRequest +def test_delete_hardware_rest_bad_request( + transport: str = "rest", request_type=service.DeleteHardwareRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22496,9 +23228,7 @@ def test_update_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22510,10 +23240,10 @@ def test_update_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_hardware(request) + client.delete_hardware(request) -def test_update_hardware_rest_flattened(): +def test_delete_hardware_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22525,14 +23255,11 @@ def test_update_hardware_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "hardware": {"name": "projects/sample1/locations/sample2/hardware/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} # get truthy value for each flattened field mock_args = dict( - hardware=resources.Hardware(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -22543,20 +23270,20 @@ def test_update_hardware_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_hardware(**mock_args) + client.delete_hardware(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{hardware.name=projects/*/locations/*/hardware/*}" + "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" % client.transport._host, args[1], ) -def test_update_hardware_rest_flattened_error(transport: str = "rest"): +def test_delete_hardware_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22565,14 +23292,13 @@ def test_update_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_hardware( - service.UpdateHardwareRequest(), - hardware=resources.Hardware(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_hardware( + service.DeleteHardwareRequest(), + name="name_value", ) -def test_update_hardware_rest_error(): +def test_delete_hardware_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22581,39 +23307,46 @@ def test_update_hardware_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteHardwareRequest, + service.ListCommentsRequest, dict, ], ) -def test_delete_hardware_rest(request_type): +def test_list_comments_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListCommentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware(request) + response = client.list_comments(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListCommentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_delete_hardware_rest_use_cached_wrapped_rpc(): +def test_list_comments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22627,39 +23360,33 @@ def test_delete_hardware_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_hardware in client._transport._wrapped_methods + assert client._transport.list_comments in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_hardware] = mock_rpc + client._transport._wrapped_methods[client._transport.list_comments] = mock_rpc request = {} - client.delete_hardware(request) + client.list_comments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - client.delete_hardware(request) + client.list_comments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_hardware_rest_required_fields( - request_type=service.DeleteHardwareRequest, -): +def test_list_comments_rest_required_fields(request_type=service.ListCommentsRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22670,23 +23397,30 @@ def test_delete_hardware_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware._get_unset_required_fields(jsonified_request) + ).list_comments._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_hardware._get_unset_required_fields(jsonified_request) + ).list_comments._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22695,7 +23429,7 @@ def test_delete_hardware_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListCommentsResponse() # Mock the http request call within the method and fake a response. 
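Throughout these tests the HTTP layer is fed a mocked transcode() result: an HTTP verb, the expanded URI, the query parameters, and, for POST/PATCH methods, a body. A rough sketch of what such a result looks like for a list-style call, using an invented helper and the sample parent from the tests:

# Sketch only: build a transcode()-shaped dict for a GET list call.
def transcode_list_comments(request: dict) -> dict:
    uri = "/v1alpha/{parent}/comments".format(parent=request["parent"])
    query_params = {k: v for k, v in request.items() if k != "parent"}
    return {"method": "get", "uri": uri, "query_params": query_params}

result = transcode_list_comments(
    {"parent": "projects/sample1/locations/sample2/orders/sample3", "page_size": 50}
)
assert result["method"] == "get"
assert result["uri"] == "/v1alpha/projects/sample1/locations/sample2/orders/sample3/comments"
assert result["query_params"] == {"page_size": 50}
assert "body" not in result  # GET calls carry no request body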
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22707,36 +23441,49 @@ def test_delete_hardware_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_hardware(request) + response = client.list_comments(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_hardware_rest_unset_required_fields(): +def test_list_comments_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_hardware._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_comments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_hardware_rest_interceptors(null_interceptor): +def test_list_comments_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22749,15 +23496,13 @@ def test_delete_hardware_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware" + transports.GDCHardwareManagementRestInterceptor, "post_list_comments" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware" + transports.GDCHardwareManagementRestInterceptor, "pre_list_comments" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteHardwareRequest.pb(service.DeleteHardwareRequest()) + pb_message = service.ListCommentsRequest.pb(service.ListCommentsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22768,19 +23513,19 @@ def test_delete_hardware_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListCommentsResponse.to_json( + service.ListCommentsResponse() ) - request = service.DeleteHardwareRequest() + request = service.ListCommentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListCommentsResponse() - client.delete_hardware( + client.list_comments( request, metadata=[ ("key", "val"), @@ -22792,8 +23537,8 @@ def test_delete_hardware_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_delete_hardware_rest_bad_request( - transport: str = "rest", request_type=service.DeleteHardwareRequest +def test_list_comments_rest_bad_request( + transport: str = "rest", request_type=service.ListCommentsRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22801,7 +23546,7 @@ def test_delete_hardware_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22813,10 +23558,10 @@ def test_delete_hardware_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_hardware(request) + client.list_comments(request) -def test_delete_hardware_rest_flattened(): +def test_list_comments_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22825,38 +23570,40 @@ def test_delete_hardware_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListCommentsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/hardware/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCommentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_hardware(**mock_args) + client.list_comments(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/hardware/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" % client.transport._host, args[1], ) -def test_delete_hardware_rest_flattened_error(transport: str = "rest"): +def test_list_comments_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22865,61 +23612,122 @@ def test_delete_hardware_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_hardware( - service.DeleteHardwareRequest(), - name="name_value", + client.list_comments( + service.ListCommentsRequest(), + parent="parent_value", ) -def test_delete_hardware_rest_error(): +def test_list_comments_rest_pager(transport: str = "rest"): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + resources.Comment(), + ], + next_page_token="abc", + ), + service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + ], + next_page_token="ghi", + ), + service.ListCommentsResponse( + comments=[ + resources.Comment(), + resources.Comment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListCommentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + + pager = client.list_comments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Comment) for i in results) + + pages = list(client.list_comments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListCommentsRequest, + service.GetCommentRequest, dict, ], ) -def test_list_comments_rest(request_type): +def test_get_comment_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
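The pager test above feeds four ListCommentsResponse pages through the client and expects six results plus the page tokens "abc", "def", "ghi", and "". The chaining it exercises boils down to following next_page_token until it is empty; a minimal sketch with plain dictionaries in place of the response messages:

# Sketch only: walk pages by following next_page_token until it is empty.
pages_by_token = {
    "": {"comments": ["c1", "c2", "c3"], "next_page_token": "abc"},
    "abc": {"comments": [], "next_page_token": "def"},
    "def": {"comments": ["c4"], "next_page_token": "ghi"},
    "ghi": {"comments": ["c5", "c6"], "next_page_token": ""},
}

def iterate_comments(fetch_page):
    token = ""
    while True:
        page = fetch_page(token)
        yield from page["comments"]
        token = page["next_page_token"]
        if not token:
            break

results = list(iterate_comments(lambda token: pages_by_token[token]))
assert len(results) == 6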
- return_value = service.ListCommentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCommentsResponse.pb(return_value) + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_comments(request) + response = client.get_comment(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCommentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE -def test_list_comments_rest_use_cached_wrapped_rpc(): +def test_get_comment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22933,33 +23741,33 @@ def test_list_comments_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_comments in client._transport._wrapped_methods + assert client._transport.get_comment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_comments] = mock_rpc + client._transport._wrapped_methods[client._transport.get_comment] = mock_rpc request = {} - client.list_comments(request) + client.get_comment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_comments(request) + client.get_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_comments_rest_required_fields(request_type=service.ListCommentsRequest): +def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22970,30 +23778,21 @@ def test_list_comments_rest_required_fields(request_type=service.ListCommentsReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_comments._get_unset_required_fields(jsonified_request) + ).get_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_comments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23002,7 +23801,7 @@ def test_list_comments_rest_required_fields(request_type=service.ListCommentsReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListCommentsResponse() + return_value = resources.Comment() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23023,40 +23822,30 @@ def test_list_comments_rest_required_fields(request_type=service.ListCommentsReq response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCommentsResponse.pb(return_value) + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_comments(request) + response = client.get_comment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_comments_rest_unset_required_fields(): +def test_get_comment_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_comments._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_comment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_comments_rest_interceptors(null_interceptor): +def test_get_comment_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23069,13 +23858,13 @@ def test_list_comments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_list_comments" + transports.GDCHardwareManagementRestInterceptor, "post_get_comment" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_list_comments" + transports.GDCHardwareManagementRestInterceptor, "pre_get_comment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListCommentsRequest.pb(service.ListCommentsRequest()) + pb_message = service.GetCommentRequest.pb(service.GetCommentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23086,19 +23875,17 @@ def test_list_comments_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListCommentsResponse.to_json( - service.ListCommentsResponse() - ) + req.return_value._content = resources.Comment.to_json(resources.Comment()) - request = service.ListCommentsRequest() + request = service.GetCommentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListCommentsResponse() + post.return_value = resources.Comment() - client.list_comments( + client.get_comment( request, metadata=[ ("key", "val"), @@ -23110,8 +23897,8 @@ def test_list_comments_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_comments_rest_bad_request( - transport: str = "rest", request_type=service.ListCommentsRequest +def test_get_comment_rest_bad_request( + transport: str = "rest", request_type=service.GetCommentRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-23119,7 +23906,9 @@ def test_list_comments_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23131,10 +23920,10 @@ def test_list_comments_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_comments(request) + client.get_comment(request) -def test_list_comments_rest_flattened(): +def test_get_comment_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23143,14 +23932,16 @@ def test_list_comments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListCommentsResponse() + return_value = resources.Comment() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -23158,25 +23949,25 @@ def test_list_comments_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCommentsResponse.pb(return_value) + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_comments(**mock_args) + client.get_comment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}" % client.transport._host, args[1], ) -def test_list_comments_rest_flattened_error(transport: str = "rest"): +def test_get_comment_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23185,120 +23976,130 @@ def test_list_comments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_comments( - service.ListCommentsRequest(), - parent="parent_value", + client.get_comment( + service.GetCommentRequest(), + name="name_value", ) -def test_list_comments_rest_pager(transport: str = "rest"): +def test_get_comment_rest_error(): client = GDCHardwareManagementClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListCommentsResponse( - comments=[ - resources.Comment(), - resources.Comment(), - resources.Comment(), - ], - next_page_token="abc", - ), - service.ListCommentsResponse( - comments=[], - next_page_token="def", - ), - service.ListCommentsResponse( - comments=[ - resources.Comment(), - ], - next_page_token="ghi", - ), - service.ListCommentsResponse( - comments=[ - resources.Comment(), - resources.Comment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListCommentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} - - pager = client.list_comments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Comment) for i in results) - - pages = list(client.list_comments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - service.GetCommentRequest, + service.CreateCommentRequest, dict, ], ) -def test_get_comment_rest(request_type): +def test_create_comment_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init["comment"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "author": "author_value", + "text": "text_value", + "customer_viewed_time": {}, + "author_entity": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateCommentRequest.meta.fields["comment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["comment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["comment"][field])): + del request_init["comment"][field][i][subfield] + else: + del request_init["comment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Comment( - name="name_value", - author="author_value", - text="text_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_comment(request) + response = client.create_comment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Comment) - assert response.name == "name_value" - assert response.author == "author_value" - assert response.text == "text_value" + assert response.operation.name == "operations/spam" -def test_get_comment_rest_use_cached_wrapped_rpc(): +def test_create_comment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23312,33 +24113,37 @@ def test_get_comment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_comment in client._transport._wrapped_methods + assert client._transport.create_comment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_comment] = mock_rpc + client._transport._wrapped_methods[client._transport.create_comment] = mock_rpc request = {} - client.get_comment(request) + client.create_comment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_comment(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest): +def test_create_comment_rest_required_fields(request_type=service.CreateCommentRequest): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23349,21 +24154,28 @@ def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_comment._get_unset_required_fields(jsonified_request) + ).create_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_comment._get_unset_required_fields(jsonified_request) + ).create_comment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "comment_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23372,7 +24184,7 @@ def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.Comment() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23384,39 +24196,50 @@ def test_get_comment_rest_required_fields(request_type=service.GetCommentRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_comment(request) + response = client.create_comment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_comment_rest_unset_required_fields(): +def test_create_comment_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_comment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_comment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "commentId", + "requestId", + ) + ) + & set( + ( + "parent", + "comment", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_comment_rest_interceptors(null_interceptor): +def test_create_comment_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23429,13 +24252,15 @@ def test_get_comment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_get_comment" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_create_comment" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_get_comment" + transports.GDCHardwareManagementRestInterceptor, "pre_create_comment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetCommentRequest.pb(service.GetCommentRequest()) + pb_message = service.CreateCommentRequest.pb(service.CreateCommentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23446,17 +24271,19 @@ def test_get_comment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Comment.to_json(resources.Comment()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetCommentRequest() + request = service.CreateCommentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Comment() + post.return_value = operations_pb2.Operation() - client.get_comment( + client.create_comment( request, metadata=[ 
("key", "val"), @@ -23468,8 +24295,8 @@ def test_get_comment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_comment_rest_bad_request( - transport: str = "rest", request_type=service.GetCommentRequest +def test_create_comment_rest_bad_request( + transport: str = "rest", request_type=service.CreateCommentRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23477,9 +24304,7 @@ def test_get_comment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23491,10 +24316,10 @@ def test_get_comment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_comment(request) + client.create_comment(request) -def test_get_comment_rest_flattened(): +def test_create_comment_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23503,42 +24328,40 @@ def test_get_comment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Comment() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_comment(**mock_args) + client.create_comment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}" + "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" % client.transport._host, args[1], ) -def test_get_comment_rest_flattened_error(transport: str = "rest"): +def test_create_comment_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23547,13 +24370,15 @@ def test_get_comment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_comment( - service.GetCommentRequest(), - name="name_value", + client.create_comment( + service.CreateCommentRequest(), + parent="parent_value", + comment=resources.Comment(name="name_value"), + comment_id="comment_id_value", ) -def test_get_comment_rest_error(): +def test_create_comment_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23562,113 +24387,52 @@ def test_get_comment_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateCommentRequest, + service.RecordActionOnCommentRequest, dict, ], ) -def test_create_comment_rest(request_type): +def test_record_action_on_comment_rest(request_type): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} - request_init["comment"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "labels": {}, - "author": "author_value", - "text": "text_value", + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateCommentRequest.meta.fields["comment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["comment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["comment"][field])): - del request_init["comment"][field][i][subfield] - else: - del request_init["comment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Comment( + name="name_value", + author="author_value", + text="text_value", + author_entity=resources.Entity.GOOGLE, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_comment(request) + response = client.record_action_on_comment(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.Comment) + assert response.name == "name_value" + assert response.author == "author_value" + assert response.text == "text_value" + assert response.author_entity == resources.Entity.GOOGLE -def test_create_comment_rest_use_cached_wrapped_rpc(): +def test_record_action_on_comment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23682,37 +24446,40 @@ def test_create_comment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_comment in client._transport._wrapped_methods + assert ( + client._transport.record_action_on_comment + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_comment] = mock_rpc + client._transport._wrapped_methods[ + client._transport.record_action_on_comment + ] = mock_rpc request = {} - client.create_comment(request) + client.record_action_on_comment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_comment(request) + client.record_action_on_comment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_comment_rest_required_fields(request_type=service.CreateCommentRequest): +def test_record_action_on_comment_rest_required_fields( + request_type=service.RecordActionOnCommentRequest, +): transport_class = transports.GDCHardwareManagementRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23723,28 +24490,21 @@ def test_create_comment_rest_required_fields(request_type=service.CreateCommentR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_comment._get_unset_required_fields(jsonified_request) + ).record_action_on_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_comment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "comment_id", - "request_id", - ) - ) + ).record_action_on_comment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23753,7 +24513,7 @@ def test_create_comment_rest_required_fields(request_type=service.CreateCommentR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Comment() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23773,42 +24533,40 @@ def test_create_comment_rest_required_fields(request_type=service.CreateCommentR response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_comment(request) + response = client.record_action_on_comment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_comment_rest_unset_required_fields(): +def test_record_action_on_comment_rest_unset_required_fields(): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_comment._get_unset_required_fields({}) + unset_fields = transport.record_action_on_comment._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "commentId", - "requestId", - ) - ) + set(()) & set( ( - "parent", - "comment", + "name", + "actionType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_comment_rest_interceptors(null_interceptor): +def test_record_action_on_comment_rest_interceptors(null_interceptor): transport = transports.GDCHardwareManagementRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23821,15 +24579,15 @@ def test_create_comment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "post_create_comment" + transports.GDCHardwareManagementRestInterceptor, "post_record_action_on_comment" ) as post, mock.patch.object( - transports.GDCHardwareManagementRestInterceptor, "pre_create_comment" + transports.GDCHardwareManagementRestInterceptor, "pre_record_action_on_comment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateCommentRequest.pb(service.CreateCommentRequest()) + pb_message = service.RecordActionOnCommentRequest.pb( + service.RecordActionOnCommentRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23840,19 +24598,17 @@ def test_create_comment_rest_interceptors(null_interceptor): req.return_value = Response() 
req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.Comment.to_json(resources.Comment()) - request = service.CreateCommentRequest() + request = service.RecordActionOnCommentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.Comment() - client.create_comment( + client.record_action_on_comment( request, metadata=[ ("key", "val"), @@ -23864,8 +24620,8 @@ def test_create_comment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_comment_rest_bad_request( - transport: str = "rest", request_type=service.CreateCommentRequest +def test_record_action_on_comment_rest_bad_request( + transport: str = "rest", request_type=service.RecordActionOnCommentRequest ): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23873,7 +24629,9 @@ def test_create_comment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23885,10 +24643,10 @@ def test_create_comment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_comment(request) + client.record_action_on_comment(request) -def test_create_comment_rest_flattened(): +def test_record_action_on_comment_rest_flattened(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23897,40 +24655,43 @@ def test_create_comment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Comment() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/orders/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/orders/sample3/comments/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - comment=resources.Comment(name="name_value"), - comment_id="comment_id_value", + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Comment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_comment(**mock_args) + client.record_action_on_comment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/orders/*}/comments" + "%s/v1alpha/{name=projects/*/locations/*/orders/*/comments/*}:recordAction" % client.transport._host, args[1], ) -def test_create_comment_rest_flattened_error(transport: str = "rest"): +def test_record_action_on_comment_rest_flattened_error(transport: str = "rest"): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23939,15 +24700,14 @@ def test_create_comment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_comment( - service.CreateCommentRequest(), - parent="parent_value", - comment=resources.Comment(name="name_value"), - comment_id="comment_id_value", + client.record_action_on_comment( + service.RecordActionOnCommentRequest(), + name="name_value", + action_type=service.RecordActionOnCommentRequest.ActionType.READ, ) -def test_create_comment_rest_error(): +def test_record_action_on_comment_rest_error(): client = GDCHardwareManagementClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -26069,6 +26829,13 @@ def test_create_zone_rest(request_type): "kubernetes_ipv4_subnet": {}, }, "globally_unique_id": "globally_unique_id_value", + "subscription_configs": [ + { + "subscription_id": "subscription_id_value", + "billing_id": "billing_id_value", + "state": 1, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -26498,6 +27265,13 @@ def test_update_zone_rest(request_type): "kubernetes_ipv4_subnet": {}, }, "globally_unique_id": "globally_unique_id_value", + "subscription_configs": [ + { + "subscription_id": "subscription_id_value", + "billing_id": "billing_id_value", + "state": 1, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -27428,7 +28202,7 @@ def test_signal_zone_state_rest_flattened(): # get truthy value for each flattened field mock_args = dict( name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, ) mock_args.update(sample_request) @@ -27464,7 +28238,7 @@ def test_signal_zone_state_rest_flattened_error(transport: str = "rest"): client.signal_zone_state( service.SignalZoneStateRequest(), name="name_value", - state_signal=service.SignalZoneStateRequest.StateSignal.READY_FOR_SITE_TURNUP, + state_signal=service.SignalZoneStateRequest.StateSignal.FACTORY_TURNUP_CHECKS_PASSED, ) @@ -27636,6 +28410,7 @@ def test_gdc_hardware_management_base_transport(): "list_comments", "get_comment", "create_comment", + "record_action_on_comment", "list_change_log_entries", "get_change_log_entry", "list_skus", @@ -28005,6 +28780,9 @@ def test_gdc_hardware_management_client_transport_session_collision(transport_na session1 = client1.transport.create_comment._session session2 = client2.transport.create_comment._session assert session1 != session2 + session1 = client1.transport.record_action_on_comment._session + session2 = client2.transport.record_action_on_comment._session + assert session1 != session2 session1 = client1.transport.list_change_log_entries._session session2 = client2.transport.list_change_log_entries._session assert session1 != session2 From b4c977059e075c73781c179b26fdf915548e65c4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 12:08:14 -0400 Subject: [PATCH 082/108] fix!: [google-cloud-kms] Pagination feature is introduced for method `ListKeyHandles` in service `Autokey` (#13093) BEGIN_COMMIT_OVERRIDE fix!: Pagination feature is introduced for method ListKeyHandles in service Autokey feat: Adding a state field for AutokeyConfig docs: Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is Explicitly is marked as to have field behavior of Optional docs: A comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated for the default duration END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
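Because the `ListKeyHandles` change is marked `fix!`, it is breaking for existing callers: `Autokey.list_key_handles` now returns a `ListKeyHandlesPager` (or `ListKeyHandlesAsyncPager`) rather than a bare `ListKeyHandlesResponse`, and the request gains optional `page_size` and `page_token` fields. A minimal migration sketch, assuming the client and request types are exported from `google.cloud.kms_v1` as in the generated samples, and using placeholder project and location IDs:

    from google.cloud import kms_v1

    def print_key_handles(parent: str = "projects/my-project/locations/us-central1") -> None:
        # Placeholder parent; substitute the caller's real project and location.
        client = kms_v1.AutokeyClient()
        request = kms_v1.ListKeyHandlesRequest(
            parent=parent,
            page_size=50,  # optional; the service may return fewer per page
        )
        # The pager yields KeyHandle messages and transparently follows
        # next_page_token to fetch additional pages.
        for key_handle in client.list_key_handles(request=request):
            print(key_handle.name)

Code that previously read `response.key_handles` directly will still see the first page (the pager forwards attribute access to the most recent response), but iterating the pager, or its `pages` property, is the intended pattern; the async client returns a `ListKeyHandlesAsyncPager` consumed with `async for`.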
feat: Adding a state field for AutokeyConfig docs: Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is Explicitly is marked as to have field behavior of Optional docs: A comment for field `destroy_scheduled_duration` in message `.google.cloud.kms.v1.CryptoKey` is updated for the default duration PiperOrigin-RevId: 676068244 Source-Link: https://github.com/googleapis/googleapis/commit/42492c963aaac713339511a145fbefcd78f95880 Source-Link: https://github.com/googleapis/googleapis-gen/commit/47432180bdfba879fc7f82c4c451181702f25009 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWttcy8uT3dsQm90LnlhbWwiLCJoIjoiNDc0MzIxODBiZGZiYTg3OWZjN2Y4MmM0YzQ1MTE4MTcwMmYyNTAwOSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google-cloud-kms/docs/kms_v1/autokey.rst | 4 + .../kms_v1/services/autokey/async_client.py | 30 +- .../cloud/kms_v1/services/autokey/client.py | 29 +- .../cloud/kms_v1/services/autokey/pagers.py | 193 +++++++++++ .../services/autokey/transports/grpc.py | 5 +- .../autokey/transports/grpc_asyncio.py | 5 +- .../services/autokey/transports/rest.py | 5 +- .../services/autokey_admin/async_client.py | 4 +- .../kms_v1/services/autokey_admin/client.py | 3 +- .../services/autokey_admin/transports/grpc.py | 3 +- .../autokey_admin/transports/grpc_asyncio.py | 3 +- .../services/autokey_admin/transports/rest.py | 3 +- .../google/cloud/kms_v1/types/autokey.py | 33 ++ .../cloud/kms_v1/types/autokey_admin.py | 30 ++ .../google/cloud/kms_v1/types/ekm_service.py | 2 +- .../google/cloud/kms_v1/types/resources.py | 2 +- ...enerated_autokey_list_key_handles_async.py | 5 +- ...generated_autokey_list_key_handles_sync.py | 5 +- .../snippet_metadata_google.cloud.kms.v1.json | 16 +- .../scripts/fixup_kms_v1_keywords.py | 2 +- .../tests/unit/gapic/kms_v1/test_autokey.py | 300 +++++++++++++++++- .../unit/gapic/kms_v1/test_autokey_admin.py | 15 + 22 files changed, 648 insertions(+), 49 deletions(-) create mode 100644 packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py diff --git a/packages/google-cloud-kms/docs/kms_v1/autokey.rst b/packages/google-cloud-kms/docs/kms_v1/autokey.rst index 2335ac5e9da8..266646a17c28 100644 --- a/packages/google-cloud-kms/docs/kms_v1/autokey.rst +++ b/packages/google-cloud-kms/docs/kms_v1/autokey.rst @@ -4,3 +4,7 @@ Autokey .. automodule:: google.cloud.kms_v1.services.autokey :members: :inherited-members: + +.. automodule:: google.cloud.kms_v1.services.autokey.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py index 6f5a61fc292d..e79da53a08e6 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py @@ -49,6 +49,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.cloud.kms_v1.services.autokey import pagers from google.cloud.kms_v1.types import autokey from .client import AutokeyClient @@ -57,8 +58,9 @@ class AutokeyAsyncClient: - """Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + """Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. 
To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a @@ -544,7 +546,7 @@ async def list_key_handles( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> autokey.ListKeyHandlesResponse: + ) -> pagers.ListKeyHandlesAsyncPager: r"""Lists [KeyHandles][google.cloud.kms.v1.KeyHandle]. .. code-block:: python @@ -568,10 +570,11 @@ async def sample_list_key_handles(): ) # Make the request - response = await client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: request (Optional[Union[google.cloud.kms_v1.types.ListKeyHandlesRequest, dict]]): @@ -593,10 +596,13 @@ async def sample_list_key_handles(): sent along with the request as metadata. Returns: - google.cloud.kms_v1.types.ListKeyHandlesResponse: + google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesAsyncPager: Response message for [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles]. + Iterating over this object will yield results and + resolve additional pages automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -641,6 +647,17 @@ async def sample_list_key_handles(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListKeyHandlesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response @@ -855,6 +872,7 @@ async def set_iam_policy( **JSON Example** :: + { "bindings": [ { diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py index e17e6a6fb76e..8656ffb6a4db 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py @@ -55,6 +55,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.cloud.kms_v1.services.autokey import pagers from google.cloud.kms_v1.types import autokey from .transports.base import DEFAULT_CLIENT_INFO, AutokeyTransport @@ -99,8 +100,9 @@ def get_transport_class( class AutokeyClient(metaclass=AutokeyClientMeta): - """Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + """Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a @@ -986,7 +988,7 @@ def list_key_handles( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> autokey.ListKeyHandlesResponse: + ) -> pagers.ListKeyHandlesPager: r"""Lists [KeyHandles][google.cloud.kms.v1.KeyHandle]. .. 
code-block:: python @@ -1010,10 +1012,11 @@ def sample_list_key_handles(): ) # Make the request - response = client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: request (Union[google.cloud.kms_v1.types.ListKeyHandlesRequest, dict]): @@ -1035,10 +1038,13 @@ def sample_list_key_handles(): sent along with the request as metadata. Returns: - google.cloud.kms_v1.types.ListKeyHandlesResponse: + google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesPager: Response message for [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles]. + Iterating over this object will yield results and + resolve additional pages automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1080,6 +1086,17 @@ def sample_list_key_handles(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListKeyHandlesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py new file mode 100644 index 000000000000..5ba18404a1ec --- /dev/null +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/pagers.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.kms_v1.types import autokey + + +class ListKeyHandlesPager: + """A pager for iterating through ``list_key_handles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``key_handles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListKeyHandles`` requests and continue to iterate + through the ``key_handles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., autokey.ListKeyHandlesResponse], + request: autokey.ListKeyHandlesRequest, + response: autokey.ListKeyHandlesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.kms_v1.types.ListKeyHandlesRequest): + The initial request object. + response (google.cloud.kms_v1.types.ListKeyHandlesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = autokey.ListKeyHandlesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[autokey.ListKeyHandlesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autokey.KeyHandle]: + for page in self.pages: + yield from page.key_handles + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListKeyHandlesAsyncPager: + """A pager for iterating through ``list_key_handles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``key_handles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListKeyHandles`` requests and continue to iterate + through the ``key_handles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.kms_v1.types.ListKeyHandlesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[autokey.ListKeyHandlesResponse]], + request: autokey.ListKeyHandlesRequest, + response: autokey.ListKeyHandlesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.kms_v1.types.ListKeyHandlesRequest): + The initial request object. + response (google.cloud.kms_v1.types.ListKeyHandlesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = autokey.ListKeyHandlesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[autokey.ListKeyHandlesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[autokey.KeyHandle]: + async def async_generator(): + async for page in self.pages: + for response in page.key_handles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py index daf4f02878d3..e248c23a9c2f 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py @@ -34,8 +34,9 @@ class AutokeyGrpcTransport(AutokeyTransport): """gRPC backend transport for Autokey. - Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py index 0028a1b6eb14..efd29a509e7f 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py @@ -37,8 +37,9 @@ class AutokeyGrpcAsyncIOTransport(AutokeyTransport): """gRPC AsyncIO backend transport for Autokey. - Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py index d53e8639b8d9..2d9832d218dc 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py @@ -316,8 +316,9 @@ class AutokeyRestStub: class AutokeyRestTransport(AutokeyTransport): """REST backend transport for Autokey. 
- Provides interfaces for using Cloud KMS Autokey to provision new - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer + Provides interfaces for using `Cloud KMS + Autokey `__ to provision + new [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer Managed Encryption Key (CMEK) use, on-demand. To support certain client tooling, this feature is modeled around a [KeyHandle][google.cloud.kms.v1.KeyHandle] resource: creating a diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py index 6c285d379502..af84851a3916 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py @@ -56,7 +56,8 @@ class AutokeyAdminAsyncClient: - """Provides interfaces for managing Cloud KMS Autokey folder-level + """Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. Setting a configuration on a folder @@ -825,6 +826,7 @@ async def set_iam_policy( **JSON Example** :: + { "bindings": [ { diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py index ea8aa35437b3..9d2044169727 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py @@ -98,7 +98,8 @@ def get_transport_class( class AutokeyAdminClient(metaclass=AutokeyAdminClientMeta): - """Provides interfaces for managing Cloud KMS Autokey folder-level + """Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py index 1b7ac8d3dc43..6ee4354598fb 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py @@ -34,7 +34,8 @@ class AutokeyAdminGrpcTransport(AutokeyAdminTransport): """gRPC backend transport for AutokeyAdmin. - Provides interfaces for managing Cloud KMS Autokey folder-level + Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. 
Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py index b1c8dfa31ad4..f6fcb383e2f5 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py @@ -37,7 +37,8 @@ class AutokeyAdminGrpcAsyncIOTransport(AutokeyAdminTransport): """gRPC AsyncIO backend transport for AutokeyAdmin. - Provides interfaces for managing Cloud KMS Autokey folder-level + Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py index a6b03e22c972..e8affeb02ab6 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py @@ -316,7 +316,8 @@ class AutokeyAdminRestStub: class AutokeyAdminRestTransport(AutokeyAdminTransport): """REST backend transport for AutokeyAdmin. - Provides interfaces for managing Cloud KMS Autokey folder-level + Provides interfaces for managing `Cloud KMS + Autokey `__ folder-level configurations. A configuration is inherited by all descendent projects. A configuration at one folder overrides any other configurations in its ancestry. Setting a configuration on a folder diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py index 3a5e93a32f61..f94b8284eeb5 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey.py @@ -145,6 +145,19 @@ class ListKeyHandlesRequest(proto.Message): Required. Name of the resource project and location from which to list [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g. ``projects/{PROJECT_ID}/locations/{LOCATION}``. + page_size (int): + Optional. Optional limit on the number of + [KeyHandles][google.cloud.kms.v1.KeyHandle] to include in + the response. The service may return fewer than this value. + Further [KeyHandles][google.cloud.kms.v1.KeyHandle] can + subsequently be obtained by including the + [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token] + in a subsequent request. If unspecified, at most 100 + [KeyHandles][google.cloud.kms.v1.KeyHandle] will be + returned. + page_token (str): + Optional. Optional pagination token, returned earlier via + [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token]. filter (str): Optional. Filter to apply when listing [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g. 
@@ -155,6 +168,14 @@ class ListKeyHandlesRequest(proto.Message): proto.STRING, number=1, ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) filter: str = proto.Field( proto.STRING, number=4, @@ -168,13 +189,25 @@ class ListKeyHandlesResponse(proto.Message): Attributes: key_handles (MutableSequence[google.cloud.kms_v1.types.KeyHandle]): Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. + next_page_token (str): + A token to retrieve next page of results. Pass this value in + [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] + to retrieve the next page of results. """ + @property + def raw_page(self): + return self + key_handles: MutableSequence["KeyHandle"] = proto.RepeatedField( proto.MESSAGE, number=1, message="KeyHandle", ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py index b190c6d01c97..d99b60ed72cc 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py @@ -97,8 +97,33 @@ class AutokeyConfig(proto.Message): for this key project must be granted the ``cloudkms.admin`` role (or pertinent permissions). A request with an empty key project field will clear the configuration. + state (google.cloud.kms_v1.types.AutokeyConfig.State): + Output only. The state for the AutokeyConfig. """ + class State(proto.Enum): + r"""The states AutokeyConfig can be in. + + Values: + STATE_UNSPECIFIED (0): + The state of the AutokeyConfig is + unspecified. + ACTIVE (1): + The AutokeyConfig is currently active. + KEY_PROJECT_DELETED (2): + A previously configured key project has been + deleted and the current AutokeyConfig is + unusable. + UNINITIALIZED (3): + The AutokeyConfig is not yet initialized or + has been reset to its default uninitialized + state. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + KEY_PROJECT_DELETED = 2 + UNINITIALIZED = 3 + name: str = proto.Field( proto.STRING, number=1, @@ -107,6 +132,11 @@ class AutokeyConfig(proto.Message): proto.STRING, number=2, ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) class ShowEffectiveAutokeyConfigRequest(proto.Message): diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py index 6e55bed6e26f..8fda5560ca60 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py @@ -348,7 +348,7 @@ class EkmConnection(proto.Message): [EkmConnection][google.cloud.kms.v1.EkmConnection] was created. service_resolvers (MutableSequence[google.cloud.kms_v1.types.EkmConnection.ServiceResolver]): - A list of + Optional. A list of [ServiceResolvers][google.cloud.kms.v1.EkmConnection.ServiceResolver] where the EKM can be reached. There should be one ServiceResolver per EKM replica. 
Currently, only a single diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py index d7f70db29d1a..1cf5fc19392b 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py @@ -276,7 +276,7 @@ class CryptoKey(proto.Message): state before transitioning to [DESTROYED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROYED]. If not specified at creation time, the default duration is - 24 hours. + 30 days. crypto_key_backend (str): Immutable. The resource name of the backend environment where the key material for all diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py index e7cfa3289ce7..bc76498134df 100644 --- a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_async.py @@ -44,9 +44,10 @@ async def sample_list_key_handles(): ) # Make the request - response = await client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) # [END cloudkms_v1_generated_Autokey_ListKeyHandles_async] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py index 78f4b24566a0..6a7ef9a327e8 100644 --- a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_autokey_list_key_handles_sync.py @@ -44,9 +44,10 @@ def sample_list_key_handles(): ) # Make the request - response = client.list_key_handles(request=request) + page_result = client.list_key_handles(request=request) # Handle the response - print(response) + for response in page_result: + print(response) # [END cloudkms_v1_generated_Autokey_ListKeyHandles_sync] diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index d50814dbd477..a5e165bff4c4 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -879,7 +879,7 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.kms_v1.types.ListKeyHandlesResponse", + "resultType": "google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesAsyncPager", "shortName": "list_key_handles" }, "description": "Sample for ListKeyHandles", @@ -889,12 +889,12 @@ "regionTag": "cloudkms_v1_generated_Autokey_ListKeyHandles_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -914,7 +914,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } @@ -959,7 +959,7 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.kms_v1.types.ListKeyHandlesResponse", + "resultType": "google.cloud.kms_v1.services.autokey.pagers.ListKeyHandlesPager", "shortName": "list_key_handles" }, "description": "Sample for ListKeyHandles", @@ -969,12 +969,12 @@ "regionTag": "cloudkms_v1_generated_Autokey_ListKeyHandles_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -994,7 +994,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } diff --git a/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py b/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py index 76ad33b20666..7a838e47d117 100644 --- a/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py +++ b/packages/google-cloud-kms/scripts/fixup_kms_v1_keywords.py @@ -65,7 +65,7 @@ class kmsCallTransformer(cst.CSTTransformer): 'list_crypto_key_versions': ('parent', 'page_size', 'page_token', 'view', 'filter', 'order_by', ), 'list_ekm_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_import_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_key_handles': ('parent', 'filter', ), + 'list_key_handles': ('parent', 'page_size', 'page_token', 'filter', ), 'list_key_rings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'mac_sign': ('name', 'data', 'data_crc32c', ), 'mac_verify': ('name', 'data', 'mac', 'data_crc32c', 'mac_crc32c', ), diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py index 3e124c98ef19..6d5f1694bcd9 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py @@ -60,6 +60,7 @@ from google.cloud.kms_v1.services.autokey import ( AutokeyAsyncClient, AutokeyClient, + pagers, transports, ) from google.cloud.kms_v1.types import autokey @@ -1866,7 +1867,9 @@ def test_list_key_handles(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = autokey.ListKeyHandlesResponse() + call.return_value = autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) response = client.list_key_handles(request) # Establish that the underlying gRPC stub method was called. @@ -1876,7 +1879,8 @@ def test_list_key_handles(request_type, transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, autokey.ListKeyHandlesResponse) + assert isinstance(response, pagers.ListKeyHandlesPager) + assert response.next_page_token == "next_page_token_value" def test_list_key_handles_empty_call(): @@ -1911,6 +1915,7 @@ def test_list_key_handles_non_empty_request_with_auto_populated_field(): # if they meet the requirements of AIP 4235. 
request = autokey.ListKeyHandlesRequest( parent="parent_value", + page_token="page_token_value", filter="filter_value", ) @@ -1924,6 +1929,7 @@ def test_list_key_handles_non_empty_request_with_auto_populated_field(): _, args, _ = call.mock_calls[0] assert args[0] == autokey.ListKeyHandlesRequest( parent="parent_value", + page_token="page_token_value", filter="filter_value", ) @@ -1978,7 +1984,9 @@ async def test_list_key_handles_empty_call_async(): with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - autokey.ListKeyHandlesResponse() + autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_key_handles() call.assert_called() @@ -2045,7 +2053,9 @@ async def test_list_key_handles_async( with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - autokey.ListKeyHandlesResponse() + autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_key_handles(request) @@ -2056,7 +2066,8 @@ async def test_list_key_handles_async( assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, autokey.ListKeyHandlesResponse) + assert isinstance(response, pagers.ListKeyHandlesAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2207,6 +2218,200 @@ async def test_list_key_handles_flattened_error_async(): ) +def test_list_key_handles_pager(transport_name: str = "grpc"): + client = AutokeyClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_key_handles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, autokey.KeyHandle) for i in results) + + +def test_list_key_handles_pages(transport_name: str = "grpc"): + client = AutokeyClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_key_handles), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + pages = list(client.list_key_handles(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_key_handles_async_pager(): + client = AutokeyAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_key_handles), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_key_handles( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, autokey.KeyHandle) for i in responses) + + +@pytest.mark.asyncio +async def test_list_key_handles_async_pages(): + client = AutokeyAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_key_handles), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_key_handles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -2910,7 +3115,9 @@ def test_list_key_handles_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = autokey.ListKeyHandlesResponse() + return_value = autokey.ListKeyHandlesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() @@ -2924,7 +3131,8 @@ def test_list_key_handles_rest(request_type): response = client.list_key_handles(request) # Establish that the response is the type that we expect. - assert isinstance(response, autokey.ListKeyHandlesResponse) + assert isinstance(response, pagers.ListKeyHandlesPager) + assert response.next_page_token == "next_page_token_value" def test_list_key_handles_rest_use_cached_wrapped_rpc(): @@ -2993,7 +3201,13 @@ def test_list_key_handles_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).list_key_handles._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter",)) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3047,7 +3261,16 @@ def test_list_key_handles_rest_unset_required_fields(): ) unset_fields = transport.list_key_handles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter",)) & set(("parent",))) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3183,11 +3406,66 @@ def test_list_key_handles_rest_flattened_error(transport: str = "rest"): ) -def test_list_key_handles_rest_error(): +def test_list_key_handles_rest_pager(transport: str = "rest"): client = AutokeyClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + next_page_token="abc", + ), + autokey.ListKeyHandlesResponse( + key_handles=[], + next_page_token="def", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + ], + next_page_token="ghi", + ), + autokey.ListKeyHandlesResponse( + key_handles=[ + autokey.KeyHandle(), + autokey.KeyHandle(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(autokey.ListKeyHandlesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_key_handles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, autokey.KeyHandle) for i in results) + + pages = list(client.list_key_handles(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py index 6155ff4520d9..0e7ca7eda0b3 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py @@ -1127,6 +1127,7 @@ def test_update_autokey_config(request_type, transport: str = "grpc"): call.return_value = autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) response = client.update_autokey_config(request) @@ -1140,6 +1141,7 @@ def test_update_autokey_config(request_type, transport: str = "grpc"): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_update_autokey_config_empty_call(): @@ -1247,6 +1249,7 @@ async def test_update_autokey_config_empty_call_async(): autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.update_autokey_config() @@ -1320,6 +1323,7 @@ async def test_update_autokey_config_async( autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.update_autokey_config(request) @@ -1334,6 +1338,7 @@ async def test_update_autokey_config_async( assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE @pytest.mark.asyncio @@ -1527,6 +1532,7 @@ def test_get_autokey_config(request_type, transport: str = "grpc"): call.return_value = autokey_admin.AutokeyConfig( name="name_value", 
key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) response = client.get_autokey_config(request) @@ -1540,6 +1546,7 @@ def test_get_autokey_config(request_type, transport: str = "grpc"): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_get_autokey_config_empty_call(): @@ -1650,6 +1657,7 @@ async def test_get_autokey_config_empty_call_async(): autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.get_autokey_config() @@ -1722,6 +1730,7 @@ async def test_get_autokey_config_async( autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) ) response = await client.get_autokey_config(request) @@ -1736,6 +1745,7 @@ async def test_get_autokey_config_async( assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE @pytest.mark.asyncio @@ -2301,6 +2311,7 @@ def test_update_autokey_config_rest(request_type): request_init["autokey_config"] = { "name": "folders/sample1/autokeyConfig", "key_project": "key_project_value", + "state": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -2377,6 +2388,7 @@ def get_message_fields(field): return_value = autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) # Wrap the value into a proper Response obj @@ -2394,6 +2406,7 @@ def get_message_fields(field): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_update_autokey_config_rest_use_cached_wrapped_rpc(): @@ -2695,6 +2708,7 @@ def test_get_autokey_config_rest(request_type): return_value = autokey_admin.AutokeyConfig( name="name_value", key_project="key_project_value", + state=autokey_admin.AutokeyConfig.State.ACTIVE, ) # Wrap the value into a proper Response obj @@ -2712,6 +2726,7 @@ def test_get_autokey_config_rest(request_type): assert isinstance(response, autokey_admin.AutokeyConfig) assert response.name == "name_value" assert response.key_project == "key_project_value" + assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE def test_get_autokey_config_rest_use_cached_wrapped_rpc(): From 7b4b877e9295ac6324dadf5c1b8fe06d97a49c5c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 13:05:40 -0400 Subject: [PATCH 083/108] chore: release main (#13097) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
google-ai-generativelanguage: 0.6.10 ## [0.6.10](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.9...google-ai-generativelanguage-v0.6.10) (2024-09-23) ### Features * Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add GoogleSearchRetrieval tool and candidate.grounding_metadata ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add HarmBlockThreshold.OFF ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add PredictionService (for Imagen) ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add Schema.min_items ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Add TunedModels.reader_project_numbers ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) ### Documentation * Small fixes ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) * Tag HarmCategories by the model family they're used on. ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5))
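For illustration only (outside the generated diff), a minimal sketch of exercising the new GenerationConfig penalty/logprobs fields and the new Candidate.avg_logprobs field listed above; the model name is a placeholder and credentials (API key or ADC) are assumed to be configured:

```python
import google.ai.generativelanguage as glm

# A hedged sketch, not the library's documented sample: it only shows where the
# new 0.6.10 fields plug into a standard generate_content call.
client = glm.GenerativeServiceClient()  # assumes credentials are already configured

request = glm.GenerateContentRequest(
    model="models/gemini-1.5-flash",  # placeholder model name
    contents=[glm.Content(parts=[glm.Part(text="Write a haiku about keys.")])],
    generation_config=glm.GenerationConfig(
        presence_penalty=0.2,    # new in 0.6.10
        frequency_penalty=0.3,   # new in 0.6.10
        response_logprobs=True,  # new in 0.6.10
        logprobs=3,              # new in 0.6.10
    ),
)

response = client.generate_content(request=request)
candidate = response.candidates[0]
print(candidate.avg_logprobs)  # new candidate field in 0.6.10
```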
google-analytics-data: 0.18.12 ## [0.18.12](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.11...google-analytics-data-v0.18.12) (2024-09-23) ### Features * add `GetPropertyQuotasSnapshot` method to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) * add `PropertyQuotasSnapshot` type to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) ### Documentation * update the documentation for the `CreateReportTask` method ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c))
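A sketch (outside the generated diff) of calling the new v1alpha method; the property ID and the `properties/{property}/propertyQuotasSnapshot` resource-name pattern are assumptions based on the usual Data API conventions, not confirmed by this patch:

```python
from google.analytics import data_v1alpha

# Hedged sketch of the new GetPropertyQuotasSnapshot method added in 0.18.12.
client = data_v1alpha.AlphaAnalyticsDataClient()

request = data_v1alpha.GetPropertyQuotasSnapshotRequest(
    # Assumed resource-name pattern; substitute a real GA4 property ID.
    name="properties/123456789/propertyQuotasSnapshot",
)

snapshot = client.get_property_quotas_snapshot(request=request)
print(snapshot)
```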
google-cloud-build: 3.25.0 ## [3.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.2...google-cloud-build-v3.25.0) (2024-09-23) ### Features * Add LEGACY_BUCKET option to DefaultLogsBucketBehavior ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) ### Documentation * Sanitize docs ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73))
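As a sketch only (outside the generated diff), opting into the new enum value; it assumes the existing `BuildOptions.default_logs_bucket_behavior` field in `cloudbuild_v1` and uses a placeholder project and build step:

```python
from google.cloud.devtools import cloudbuild_v1

# Hedged sketch: LEGACY_BUCKET is the DefaultLogsBucketBehavior value added in 3.25.0.
client = cloudbuild_v1.CloudBuildClient()

build = cloudbuild_v1.Build(
    steps=[cloudbuild_v1.BuildStep(name="gcr.io/cloud-builders/gsutil", args=["version"])],
    options=cloudbuild_v1.BuildOptions(
        default_logs_bucket_behavior=(
            cloudbuild_v1.BuildOptions.DefaultLogsBucketBehavior.LEGACY_BUCKET
        ),
    ),
)

operation = client.create_build(project_id="my-project", build=build)  # placeholder project
print(operation.result().status)
```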
google-cloud-dialogflow: 2.32.0 ## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.31.0...google-cloud-dialogflow-v2.32.0) (2024-09-23) ### Features * created new boolean fields in conversation model for zone isolation and zone separation compliance status ([1f8b564](https://github.com/googleapis/google-cloud-python/commit/1f8b5640b0ac5397318ede4ebcfa120120ebccc8))
google-cloud-dlp: 3.23.0 ## [3.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.22.0...google-cloud-dlp-v3.23.0) (2024-09-23) ### Features * action for publishing data profiles to SecOps (formerly known as Chronicle) ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) * action for publishing data profiles to Security Command Center ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) * discovery configs for AWS S3 buckets ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) ### Documentation * small improvements and clarifications ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969))
google-cloud-gdchardwaremanagement: 0.1.4 ## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.3...google-cloud-gdchardwaremanagement-v0.1.4) (2024-09-23) ### Features * add an order type field to distinguish a fulfillment request from a sales inquiry ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) * add support to mark comments as read or unread ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) * rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) ### Documentation * clarify how access_times are used ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef))
google-cloud-kms: 3.0.0 ## [3.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.2...google-cloud-kms-v3.0.0) (2024-09-23) ### ⚠ BREAKING CHANGES * Pagination feature is introduced for method ListKeyHandles in service Autokey ### Features * Add a state field for AutokeyConfig ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ### Bug Fixes * Pagination feature is introduced for method ListKeyHandles in service Autokey ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ### Documentation * The comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated to reflect the new default duration ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) * Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is now explicitly marked as having field behavior Optional ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4))
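For reference, a minimal sketch (outside the generated diff) of how the google-cloud-kms 3.0.0 changes above are consumed; the project, location, and folder values are placeholders, and credentials are assumed to come from the environment:

```python
from google.cloud import kms_v1

# ListKeyHandles now returns a pager (see the sample and test diffs above),
# so callers iterate instead of reading a single ListKeyHandlesResponse.
client = kms_v1.AutokeyClient()
request = kms_v1.ListKeyHandlesRequest(
    parent="projects/my-project/locations/us-central1",  # placeholder
    page_size=50,  # new optional field; if unspecified, at most 100 KeyHandles per page
)
for key_handle in client.list_key_handles(request=request):
    print(key_handle.name)

# AutokeyConfig now carries an output-only state field.
admin = kms_v1.AutokeyAdminClient()
config = admin.get_autokey_config(name="folders/my-folder/autokeyConfig")  # placeholder
if config.state != kms_v1.AutokeyConfig.State.ACTIVE:
    print(f"Autokey config is not active: {config.state!r}")
```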
google-maps-places: 0.1.18 ## [0.1.18](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.17...google-maps-places-v0.1.18) (2024-09-23) ### Features * action for publishing data profiles to SecOps (formerly known as Chronicle) ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) * action for publishing data profiles to Security Command Center ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) * discovery configs for AWS S3 buckets ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) ### Documentation * small improvements and clarifications ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 16 +++++++------- .../google-ai-generativelanguage/CHANGELOG.md | 19 ++++++++++++++++ .../ai/generativelanguage/gapic_version.py | 2 +- .../ai/generativelanguage_v1/gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- .../gapic_version.py | 2 +- ...adata_google.ai.generativelanguage.v1.json | 2 +- ...a_google.ai.generativelanguage.v1beta.json | 2 +- ..._google.ai.generativelanguage.v1beta2.json | 2 +- ..._google.ai.generativelanguage.v1beta3.json | 2 +- packages/google-analytics-data/CHANGELOG.md | 13 +++++++++++ .../google/analytics/data/gapic_version.py | 2 +- .../analytics/data_v1alpha/gapic_version.py | 2 +- .../analytics/data_v1beta/gapic_version.py | 2 +- ...etadata_google.analytics.data.v1alpha.json | 2 +- ...metadata_google.analytics.data.v1beta.json | 2 +- packages/google-cloud-build/CHANGELOG.md | 12 ++++++++++ .../devtools/cloudbuild/gapic_version.py | 2 +- .../devtools/cloudbuild_v1/gapic_version.py | 2 +- .../devtools/cloudbuild_v2/gapic_version.py | 2 +- ...etadata_google.devtools.cloudbuild.v1.json | 2 +- ...etadata_google.devtools.cloudbuild.v2.json | 2 +- packages/google-cloud-dialogflow/CHANGELOG.md | 7 ++++++ .../google/cloud/dialogflow/gapic_version.py | 2 +- .../cloud/dialogflow_v2/gapic_version.py | 2 +- .../cloud/dialogflow_v2beta1/gapic_version.py | 2 +- ...t_metadata_google.cloud.dialogflow.v2.json | 2 +- ...adata_google.cloud.dialogflow.v2beta1.json | 2 +- packages/google-cloud-dlp/CHANGELOG.md | 14 ++++++++++++ .../google/cloud/dlp/gapic_version.py | 2 +- .../google/cloud/dlp_v2/gapic_version.py | 2 +- ...nippet_metadata_google.privacy.dlp.v2.json | 2 +- .../CHANGELOG.md | 14 ++++++++++++ .../gdchardwaremanagement/gapic_version.py | 2 +- .../gapic_version.py | 2 +- ...e.cloud.gdchardwaremanagement.v1alpha.json | 2 +- packages/google-cloud-kms/CHANGELOG.md | 22 +++++++++++++++++++ .../google/cloud/kms/gapic_version.py | 2 +- .../google/cloud/kms_v1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.kms.v1.json | 2 +- packages/google-maps-places/CHANGELOG.md | 14 ++++++++++++ .../google/maps/places/gapic_version.py | 2 +- .../google/maps/places_v1/gapic_version.py | 2 +- ...nippet_metadata_google.maps.places.v1.json | 2 +- 45 files changed, 159 insertions(+), 44 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 19f3bed603ce..9b7f01b6663b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,9 +1,9 @@ { "packages/google-ads-admanager": "0.1.2", "packages/google-ads-marketingplatform-admin": "0.1.0", - "packages/google-ai-generativelanguage": "0.6.9", + "packages/google-ai-generativelanguage": "0.6.10", "packages/google-analytics-admin": "0.23.0", - "packages/google-analytics-data": "0.18.11", + "packages/google-analytics-data": "0.18.12", "packages/google-apps-card": "0.1.4", "packages/google-apps-chat": "0.1.11", "packages/google-apps-events-subscriptions": "0.1.2", @@ -46,7 +46,7 @@ "packages/google-cloud-billing": "1.13.6", "packages/google-cloud-billing-budgets": "1.14.5", "packages/google-cloud-binary-authorization": "1.10.5", - "packages/google-cloud-build": "3.24.2", + "packages/google-cloud-build": "3.25.0", "packages/google-cloud-certificate-manager": 
"1.7.2", "packages/google-cloud-channel": "1.18.5", "packages/google-cloud-cloudcontrolspartner": "0.2.0", @@ -73,10 +73,10 @@ "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", "packages/google-cloud-developerconnect": "0.1.2", - "packages/google-cloud-dialogflow": "2.31.0", + "packages/google-cloud-dialogflow": "2.32.0", "packages/google-cloud-dialogflow-cx": "1.35.0", "packages/google-cloud-discoveryengine": "0.12.2", - "packages/google-cloud-dlp": "3.22.0", + "packages/google-cloud-dlp": "3.23.0", "packages/google-cloud-dms": "1.9.5", "packages/google-cloud-documentai": "2.32.0", "packages/google-cloud-domains": "1.7.5", @@ -88,7 +88,7 @@ "packages/google-cloud-eventarc-publishing": "0.6.11", "packages/google-cloud-filestore": "1.9.5", "packages/google-cloud-functions": "1.17.0", - "packages/google-cloud-gdchardwaremanagement": "0.1.3", + "packages/google-cloud-gdchardwaremanagement": "0.1.4", "packages/google-cloud-gke-backup": "0.5.11", "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", @@ -98,7 +98,7 @@ "packages/google-cloud-iam-logging": "1.3.5", "packages/google-cloud-iap": "1.13.5", "packages/google-cloud-ids": "1.7.5", - "packages/google-cloud-kms": "2.24.2", + "packages/google-cloud-kms": "3.0.0", "packages/google-cloud-kms-inventory": "0.2.8", "packages/google-cloud-language": "2.14.0", "packages/google-cloud-life-sciences": "0.9.12", @@ -182,7 +182,7 @@ "packages/google-maps-fleetengine": "0.2.2", "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", - "packages/google-maps-places": "0.1.17", + "packages/google-maps-places": "0.1.18", "packages/google-maps-routeoptimization": "0.1.3", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index d3b8538f00c3..c6fa336083ab 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.6.10](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.9...google-ai-generativelanguage-v0.6.10) (2024-09-23) + + +### Features + +* Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add GoogleSearchRetrieval tool and candidate.grounding_metadata ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmBlockThreshold.OFF ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add PredictionService (for Imagen) ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add Schema.min_items ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add TunedModels.reader_project_numbers ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + + +### Documentation + +* Small fixes 
([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Tag HarmCategories by the model family they're used on. ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + ## [0.6.9](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.8...google-ai-generativelanguage-v0.6.9) (2024-08-19) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 558c8aab67c5..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index d6c3fe4c5051..416353581730 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index c418dfa10386..a2110fd118ef 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index 5b7d0a0509b4..865de14ffa13 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 91de9e353f90..7fbde27c9197 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.1.0" + "version": "0.6.10" }, "snippets": [ { 
diff --git a/packages/google-analytics-data/CHANGELOG.md b/packages/google-analytics-data/CHANGELOG.md index ad685dbf13a6..f863e9dda9cd 100644 --- a/packages/google-analytics-data/CHANGELOG.md +++ b/packages/google-analytics-data/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.18.12](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.11...google-analytics-data-v0.18.12) (2024-09-23) + + +### Features + +* add `GetPropertyQuotasSnapshot` method to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) +* add `PropertyQuotasSnapshot` type to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + + +### Documentation + +* update the documentation for the `CreateReportTask` method ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + ## [0.18.11](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.10...google-analytics-data-v0.18.11) (2024-08-08) diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 15f8d0d6e6d8..d5b5816eb6e5 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.1.0" + "version": "0.18.12" }, "snippets": [ { diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index 29e86a085403..753d0fcebd81 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.1.0" + "version": "0.18.12" }, "snippets": [ { diff --git a/packages/google-cloud-build/CHANGELOG.md b/packages/google-cloud-build/CHANGELOG.md index 619b570d55f6..fb07ad800441 100644 --- a/packages/google-cloud-build/CHANGELOG.md +++ b/packages/google-cloud-build/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-build/#history +## [3.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.2...google-cloud-build-v3.25.0) (2024-09-23) + + +### Features + +* Add LEGACY_BUCKET option to DefaultLogsBucketBehavior ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + + +### Documentation + +* Sanitize docs ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + ## [3.24.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.1...google-cloud-build-v3.24.2) (2024-07-30) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index e379efab560f..66e42a84ba95 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "0.1.0" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index 818d3fc2029c..f4891e033575 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "0.1.0" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/CHANGELOG.md b/packages/google-cloud-dialogflow/CHANGELOG.md index a2dbd7ce4862..cf54bc09dc6d 100644 --- a/packages/google-cloud-dialogflow/CHANGELOG.md +++ b/packages/google-cloud-dialogflow/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/dialogflow/#history +## [2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.31.0...google-cloud-dialogflow-v2.32.0) (2024-09-23) + + +### Features + +* created new boolean fields in conversation model for zone isolation and zone separation compliance status ([1f8b564](https://github.com/googleapis/google-cloud-python/commit/1f8b5640b0ac5397318ede4ebcfa120120ebccc8)) + ## [2.31.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.30.2...google-cloud-dialogflow-v2.31.0) (2024-08-08) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index 558c8aab67c5..c82b1e137507 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json index dde14d384e60..7e99cd1321e2 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json index 58a96bc185e8..a9752b2203e1 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/packages/google-cloud-dlp/CHANGELOG.md b/packages/google-cloud-dlp/CHANGELOG.md index 589d11fd9bed..fa617bb88d88 100644 --- a/packages/google-cloud-dlp/CHANGELOG.md +++ b/packages/google-cloud-dlp/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-dlp/#history +## [3.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.22.0...google-cloud-dlp-v3.23.0) (2024-09-23) + + +### Features + +* action for publishing data profiles to SecOps (formelly known as Chronicle) ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) +* action for publishing data profiles to Security Command Center ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) +* discovery configs for AWS S3 buckets 
([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) + + +### Documentation + +* small improvements and clarifications ([afcf7cb](https://github.com/googleapis/google-cloud-python/commit/afcf7cbe57d6e0f183a113ba03bba9c288052969)) + ## [3.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.21.0...google-cloud-dlp-v3.22.0) (2024-08-19) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py index 558c8aab67c5..9304602da4e3 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py index 558c8aab67c5..9304602da4e3 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json index 4da85d5c6cd9..0555761e7f11 100644 --- a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dlp", - "version": "0.1.0" + "version": "3.23.0" }, "snippets": [ { diff --git a/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md index c00b0d36bea8..08b9cafd12e1 100644 --- a/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md +++ b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.3...google-cloud-gdchardwaremanagement-v0.1.4) (2024-09-23) + + +### Features + +* add an order type field to distinguish a fulfillment request from a sales inquiry ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) +* add support to mark comments as read or unread ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) +* rename zone state signal READY_FOR_SITE_TURNUP to FACTORY_TURNUP_CHECKS_PASSED ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) + + +### Documentation + +* clarify how access_times are used ([e727cc0](https://github.com/googleapis/google-cloud-python/commit/e727cc0e98e37d55882215182f86c2a7d23154ef)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.2...google-cloud-gdchardwaremanagement-v0.1.3) (2024-07-30) diff --git 
a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json index 588b33c7fb6e..d6af9346263f 100644 --- a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gdchardwaremanagement", - "version": "0.1.0" + "version": "0.1.4" }, "snippets": [ { diff --git a/packages/google-cloud-kms/CHANGELOG.md b/packages/google-cloud-kms/CHANGELOG.md index e46b3a3ebd63..22ad6d9a6bcf 100644 --- a/packages/google-cloud-kms/CHANGELOG.md +++ b/packages/google-cloud-kms/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-kms/#history +## [3.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.2...google-cloud-kms-v3.0.0) (2024-09-23) + + +### ⚠ BREAKING CHANGES + +* Pagination feature is introduced for method ListKeyHandles in service Autokey + +### Features + +* Adding a state field for AutokeyConfig ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) + + +### Bug Fixes + +* Pagination feature is introduced for method ListKeyHandles in service Autokey ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) + + +### Documentation + +* A comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated for the default duration ([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) +* Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is Explicitly is marked as to have field behavior of Optional 
([b4c9770](https://github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) + ## [2.24.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.1...google-cloud-kms-v2.24.2) (2024-07-30) diff --git a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py index 558c8aab67c5..b657023d6068 100644 --- a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py index 558c8aab67c5..b657023d6068 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index a5e165bff4c4..1630512d4e6c 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-kms", - "version": "0.1.0" + "version": "3.0.0" }, "snippets": [ { diff --git a/packages/google-maps-places/CHANGELOG.md b/packages/google-maps-places/CHANGELOG.md index be0f7fb9c67e..7bb089f1c273 100644 --- a/packages/google-maps-places/CHANGELOG.md +++ b/packages/google-maps-places/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.18](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.17...google-maps-places-v0.1.18) (2024-09-23) + + +### Features + +* action for publishing data profiles to SecOps (formelly known as Chronicle) ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) +* action for publishing data profiles to Security Command Center ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) +* discovery configs for AWS S3 buckets ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) + + +### Documentation + +* small improvements and clarifications ([2cc1550](https://github.com/googleapis/google-cloud-python/commit/2cc1550492a2b78ed7240aab84a8449de5e5afa2)) + ## [0.1.17](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.16...google-maps-places-v0.1.17) (2024-07-30) diff --git a/packages/google-maps-places/google/maps/places/gapic_version.py b/packages/google-maps-places/google/maps/places/gapic_version.py index 558c8aab67c5..3c51a1157647 100644 --- a/packages/google-maps-places/google/maps/places/gapic_version.py +++ b/packages/google-maps-places/google/maps/places/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing 
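The google-cloud-kms 3.0.0 notes above flag ListKeyHandles in the Autokey service as newly paginated, which is why the release is marked breaking. A hedged migration sketch, assuming the standard GAPIC pager interface and using a placeholder resource name, could look like::

    from google.cloud import kms_v1

    client = kms_v1.AutokeyClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder resource name

    # The call now returns an iterable pager rather than a single response message,
    # so callers iterate it (or use .pages) instead of reading response.key_handles.
    for key_handle in client.list_key_handles(parent=parent):
        print(key_handle.name)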
permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.18" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places_v1/gapic_version.py b/packages/google-maps-places/google/maps/places_v1/gapic_version.py index 558c8aab67c5..3c51a1157647 100644 --- a/packages/google-maps-places/google/maps/places_v1/gapic_version.py +++ b/packages/google-maps-places/google/maps/places_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.18" # {x-release-please-version} diff --git a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json index 74eb224b30a6..e9caf90c3929 100644 --- a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json +++ b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-places", - "version": "0.1.0" + "version": "0.1.18" }, "snippets": [ { From c638f1f55a85a228ec6385095ca1befb54067188 Mon Sep 17 00:00:00 2001 From: "owlbot-bootstrapper[bot]" <104649659+owlbot-bootstrapper[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:14:28 -0400 Subject: [PATCH 084/108] feat: add initial files for google.cloud.oracledatabase.v1 (#13100) Source-Link: https://github.com/googleapis/googleapis-gen/commit/09d68f35365c74ad276cea3e7c26553a1485faa0 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yYWNsZWRhdGFiYXNlLy5Pd2xCb3QueWFtbCIsImgiOiIwOWQ2OGYzNTM2NWM3NGFkMjc2Y2VhM2U3YzI2NTUzYTE0ODVmYWEwIn0= PiperOrigin-RevId: 367526014 --------- Co-authored-by: Owlbot Bootstrapper Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google-cloud-oracledatabase/.OwlBot.yaml | 18 + .../google-cloud-oracledatabase/.coveragerc | 13 + packages/google-cloud-oracledatabase/.flake8 | 33 + .../google-cloud-oracledatabase/.gitignore | 63 + .../.repo-metadata.json | 17 + .../google-cloud-oracledatabase/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../CONTRIBUTING.rst | 271 + packages/google-cloud-oracledatabase/LICENSE | 202 + .../google-cloud-oracledatabase/MANIFEST.in | 25 + .../google-cloud-oracledatabase/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../google-cloud-oracledatabase/docs/conf.py | 384 + .../docs/index.rst | 28 + .../docs/multiprocessing.rst | 7 + .../oracledatabase_v1/oracle_database.rst | 10 + .../docs/oracledatabase_v1/services_.rst | 6 + .../docs/oracledatabase_v1/types_.rst | 6 + .../docs/summary_overview.md | 22 + .../google/cloud/oracledatabase/__init__.py | 177 + .../cloud/oracledatabase/gapic_version.py | 16 + .../google/cloud/oracledatabase/py.typed | 2 + .../cloud/oracledatabase_v1/__init__.py | 168 + .../oracledatabase_v1/gapic_metadata.json | 128 + .../cloud/oracledatabase_v1/gapic_version.py | 16 + .../google/cloud/oracledatabase_v1/py.typed | 2 + .../oracledatabase_v1/services/__init__.py | 15 + .../services/oracle_database/__init__.py | 18 + .../services/oracle_database/client.py | 4073 ++++++ .../services/oracle_database/pagers.py | 876 ++ .../oracle_database/transports/__init__.py | 30 + .../oracle_database/transports/base.py | 731 ++ 
.../oracle_database/transports/rest.py | 3718 ++++++ .../cloud/oracledatabase_v1/types/__init__.py | 157 + .../types/autonomous_database.py | 1421 +++ .../autonomous_database_character_set.py | 78 + .../types/autonomous_db_backup.py | 289 + .../types/autonomous_db_version.py | 71 + .../cloud/oracledatabase_v1/types/common.py | 47 + .../cloud/oracledatabase_v1/types/db_node.py | 158 + .../oracledatabase_v1/types/db_server.py | 163 + .../types/db_system_shape.py | 117 + .../oracledatabase_v1/types/entitlement.py | 127 + .../oracledatabase_v1/types/exadata_infra.py | 468 + .../oracledatabase_v1/types/gi_version.py | 54 + .../types/location_metadata.py | 45 + .../oracledatabase_v1/types/oracledatabase.py | 1244 ++ .../oracledatabase_v1/types/vm_cluster.py | 437 + packages/google-cloud-oracledatabase/mypy.ini | 3 + .../google-cloud-oracledatabase/noxfile.py | 452 + ...atabase_create_autonomous_database_sync.py | 62 + ...reate_cloud_exadata_infrastructure_sync.py | 57 + ...e_database_create_cloud_vm_cluster_sync.py | 64 + ...atabase_delete_autonomous_database_sync.py | 56 + ...elete_cloud_exadata_infrastructure_sync.py | 56 + ...e_database_delete_cloud_vm_cluster_sync.py | 56 + ...enerate_autonomous_database_wallet_sync.py | 53 + ...e_database_get_autonomous_database_sync.py | 52 + ...e_get_cloud_exadata_infrastructure_sync.py | 52 + ...acle_database_get_cloud_vm_cluster_sync.py | 52 + ...e_list_autonomous_database_backups_sync.py | 53 + ...autonomous_database_character_sets_sync.py | 53 + ...database_list_autonomous_databases_sync.py | 53 + ...tabase_list_autonomous_db_versions_sync.py | 53 + ...list_cloud_exadata_infrastructures_sync.py | 53 + ...le_database_list_cloud_vm_clusters_sync.py | 53 + ...ated_oracle_database_list_db_nodes_sync.py | 53 + ...ed_oracle_database_list_db_servers_sync.py | 53 + ...cle_database_list_db_system_shapes_sync.py | 53 + ..._oracle_database_list_entitlements_sync.py | 53 + ...d_oracle_database_list_gi_versions_sync.py | 53 + ...tabase_restore_autonomous_database_sync.py | 56 + ...tadata_google.cloud.oracledatabase.v1.json | 1815 +++ .../scripts/decrypt-secrets.sh | 46 + .../fixup_oracledatabase_v1_keywords.py | 197 + packages/google-cloud-oracledatabase/setup.py | 95 + .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../tests/__init__.py | 15 + .../tests/unit/__init__.py | 15 + .../tests/unit/gapic/__init__.py | 15 + .../unit/gapic/oracledatabase_v1/__init__.py | 15 + .../oracledatabase_v1/test_oracle_database.py | 10589 ++++++++++++++++ 91 files changed, 30726 insertions(+) create mode 100644 packages/google-cloud-oracledatabase/.OwlBot.yaml create mode 100644 packages/google-cloud-oracledatabase/.coveragerc create mode 100644 packages/google-cloud-oracledatabase/.flake8 create mode 100644 packages/google-cloud-oracledatabase/.gitignore create mode 100644 packages/google-cloud-oracledatabase/.repo-metadata.json create mode 100644 packages/google-cloud-oracledatabase/CHANGELOG.md create mode 100644 packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-oracledatabase/CONTRIBUTING.rst create mode 100644 packages/google-cloud-oracledatabase/LICENSE create mode 100644 packages/google-cloud-oracledatabase/MANIFEST.in create mode 100644 packages/google-cloud-oracledatabase/README.rst create mode 120000 
packages/google-cloud-oracledatabase/docs/CHANGELOG.md create mode 120000 packages/google-cloud-oracledatabase/docs/README.rst create mode 100644 packages/google-cloud-oracledatabase/docs/_static/custom.css create mode 100644 packages/google-cloud-oracledatabase/docs/_templates/layout.html create mode 100644 packages/google-cloud-oracledatabase/docs/conf.py create mode 100644 packages/google-cloud-oracledatabase/docs/index.rst create mode 100644 packages/google-cloud-oracledatabase/docs/multiprocessing.rst create mode 100644 packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst create mode 100644 packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst create mode 100644 packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst create mode 100644 packages/google-cloud-oracledatabase/docs/summary_overview.md create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py create mode 100644 
packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py create mode 100644 packages/google-cloud-oracledatabase/mypy.ini create mode 100644 packages/google-cloud-oracledatabase/noxfile.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py create mode 100644 
packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py create mode 100644 packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json create mode 100755 packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh create mode 100644 packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py create mode 100644 packages/google-cloud-oracledatabase/setup.py create mode 100644 packages/google-cloud-oracledatabase/testing/.gitignore create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.10.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.11.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.12.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.7.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.8.txt create mode 100644 packages/google-cloud-oracledatabase/testing/constraints-3.9.txt create mode 100644 packages/google-cloud-oracledatabase/tests/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py create mode 100644 packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py diff --git a/packages/google-cloud-oracledatabase/.OwlBot.yaml b/packages/google-cloud-oracledatabase/.OwlBot.yaml new file mode 100644 index 000000000000..ebf74202aadf --- /dev/null +++ b/packages/google-cloud-oracledatabase/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/cloud/oracledatabase/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-oracledatabase/$1 +api-name: google-cloud-oracledatabase diff --git a/packages/google-cloud-oracledatabase/.coveragerc b/packages/google-cloud-oracledatabase/.coveragerc new file mode 100644 index 000000000000..645dc04f340e --- /dev/null +++ b/packages/google-cloud-oracledatabase/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/oracledatabase/__init__.py + google/cloud/oracledatabase/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-oracledatabase/.flake8 b/packages/google-cloud-oracledatabase/.flake8 new file mode 100644 index 000000000000..32986c79287a --- /dev/null +++ b/packages/google-cloud-oracledatabase/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-oracledatabase/.gitignore b/packages/google-cloud-oracledatabase/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-oracledatabase/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-oracledatabase/.repo-metadata.json b/packages/google-cloud-oracledatabase/.repo-metadata.json new file mode 100644 index 000000000000..6d1b2164a92f --- /dev/null +++ b/packages/google-cloud-oracledatabase/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-oracledatabase", + "name_pretty": "Oracle Database@Google Cloud API", + "api_description": "The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases.", + "product_documentation": "https://cloud.google.com/oracle/database/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1492565", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-oracledatabase", + "api_id": "oracledatabase.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "oracledatabase" +} diff --git a/packages/google-cloud-oracledatabase/CHANGELOG.md b/packages/google-cloud-oracledatabase/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CONTRIBUTING.rst b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst new file mode 100644 index 000000000000..9b24d1115e7f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. 
Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. 
+ If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-oracledatabase + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-oracledatabase/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-oracledatabase/LICENSE b/packages/google-cloud-oracledatabase/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-oracledatabase/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-oracledatabase/MANIFEST.in b/packages/google-cloud-oracledatabase/MANIFEST.in new file mode 100644 index 000000000000..d6814cd60037 --- /dev/null +++ b/packages/google-cloud-oracledatabase/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-oracledatabase/README.rst b/packages/google-cloud-oracledatabase/README.rst new file mode 100644 index 000000000000..c050103f1ecd --- /dev/null +++ b/packages/google-cloud-oracledatabase/README.rst @@ -0,0 +1,108 @@ +Python Client for Oracle Database@Google Cloud API +================================================== + +|preview| |pypi| |versions| + +`Oracle Database@Google Cloud API`_: The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-oracledatabase.svg + :target: https://pypi.org/project/google-cloud-oracledatabase/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-oracledatabase.svg + :target: https://pypi.org/project/google-cloud-oracledatabase/ +.. _Oracle Database@Google Cloud API: https://cloud.google.com/oracle/database/docs +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/oracle/database/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Oracle Database@Google Cloud API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Oracle Database@Google Cloud API.: https://cloud.google.com/oracle/database/docs +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. 
These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-oracledatabase
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-oracledatabase
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Oracle Database@Google Cloud API
+   to see other available methods on the client.
+- Read the `Oracle Database@Google Cloud API Product documentation`_ to learn
+   more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+   APIs that we cover.
+
+.. _Oracle Database@Google Cloud API Product documentation: https://cloud.google.com/oracle/database/docs
+.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-oracledatabase/docs/CHANGELOG.md b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/README.rst b/packages/google-cloud-oracledatabase/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/_static/custom.css b/packages/google-cloud-oracledatabase/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-oracledatabase/docs/_templates/layout.html b/packages/google-cloud-oracledatabase/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-oracledatabase/docs/conf.py b/packages/google-cloud-oracledatabase/docs/conf.py new file mode 100644 index 000000000000..a4b21f79d825 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-oracledatabase documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-oracledatabase" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-oracledatabase", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-oracledatabase-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-oracledatabase.tex", + "google-cloud-oracledatabase Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-oracledatabase", + "google-cloud-oracledatabase Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-oracledatabase", + "google-cloud-oracledatabase Documentation", + author, + "google-cloud-oracledatabase", + "google-cloud-oracledatabase Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-oracledatabase/docs/index.rst b/packages/google-cloud-oracledatabase/docs/index.rst new file mode 100644 index 000000000000..77ff04e09fa0 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + oracledatabase_v1/services_ + oracledatabase_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-oracledatabase`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-oracledatabase/docs/multiprocessing.rst b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst new file mode 100644 index 000000000000..ef9ce591ce83 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst @@ -0,0 +1,10 @@ +OracleDatabase +-------------------------------- + +.. automodule:: google.cloud.oracledatabase_v1.services.oracle_database + :members: + :inherited-members: + +.. automodule:: google.cloud.oracledatabase_v1.services.oracle_database.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst new file mode 100644 index 000000000000..8b9decce8ef1 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Oracledatabase v1 API +=============================================== +.. toctree:: + :maxdepth: 2 + + oracle_database diff --git a/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst new file mode 100644 index 000000000000..addba88a94ee --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Oracledatabase v1 API +============================================ + +.. automodule:: google.cloud.oracledatabase_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-oracledatabase/docs/summary_overview.md b/packages/google-cloud-oracledatabase/docs/summary_overview.md new file mode 100644 index 000000000000..326e6e99fa26 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Oracle Database@Google Cloud API + +Overview of the APIs available for Oracle Database@Google Cloud API. + +## All entries + +Classes, methods and properties & attributes for +Oracle Database@Google Cloud API API. + +[classes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_property.html) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py new file mode 100644 index 000000000000..29f02c59e323 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.oracledatabase import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.oracledatabase_v1.services.oracle_database.client import ( + OracleDatabaseClient, +) +from google.cloud.oracledatabase_v1.types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from google.cloud.oracledatabase_v1.types.autonomous_database_character_set import ( + AutonomousDatabaseCharacterSet, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_version import ( + AutonomousDbVersion, +) +from google.cloud.oracledatabase_v1.types.common import CustomerContact +from google.cloud.oracledatabase_v1.types.db_node import DbNode, DbNodeProperties +from google.cloud.oracledatabase_v1.types.db_server import DbServer, DbServerProperties +from google.cloud.oracledatabase_v1.types.db_system_shape import DbSystemShape +from google.cloud.oracledatabase_v1.types.entitlement import ( + CloudAccountDetails, + Entitlement, +) +from google.cloud.oracledatabase_v1.types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from google.cloud.oracledatabase_v1.types.gi_version import GiVersion +from google.cloud.oracledatabase_v1.types.location_metadata import LocationMetadata +from google.cloud.oracledatabase_v1.types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from google.cloud.oracledatabase_v1.types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + 
DataCollectionOptions, +) + +__all__ = ( + "OracleDatabaseClient", + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + "LocationMetadata", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed new file mode 100644 index 000000000000..cd7e437b6f62 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-oracledatabase package uses inline types. diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..225fa0bdbb4c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.oracledatabase_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.oracle_database import OracleDatabaseClient +from .types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .types.autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .types.autonomous_db_version import AutonomousDbVersion +from .types.common import CustomerContact +from .types.db_node import DbNode, DbNodeProperties +from .types.db_server import DbServer, DbServerProperties +from .types.db_system_shape import DbSystemShape +from .types.entitlement import CloudAccountDetails, Entitlement +from .types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .types.gi_version import GiVersion +from .types.location_metadata import LocationMetadata +from .types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + 
ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "AutonomousDbVersion", + "CloudAccountDetails", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "CloudVmCluster", + "CloudVmClusterProperties", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "CustomerContact", + "DBWorkload", + "DataCollectionOptions", + "DatabaseConnectionStringProfile", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "Entitlement", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GenerateType", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "GiVersion", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "LocationMetadata", + "MaintenanceWindow", + "OperationMetadata", + "OperationsInsightsState", + "OracleDatabaseClient", + "RestoreAutonomousDatabaseRequest", + "ScheduledOperationDetails", + "State", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json new file mode 100644 index 000000000000..847abe3bdc22 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json @@ -0,0 +1,128 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.oracledatabase_v1", + "protoPackage": "google.cloud.oracledatabase.v1", + "schema": "1.0", + "services": { + "OracleDatabase": { + "clients": { + "rest": { + "libraryClient": "OracleDatabaseClient", + "rpcs": { + "CreateAutonomousDatabase": { + "methods": [ + "create_autonomous_database" + ] 
+ }, + "CreateCloudExadataInfrastructure": { + "methods": [ + "create_cloud_exadata_infrastructure" + ] + }, + "CreateCloudVmCluster": { + "methods": [ + "create_cloud_vm_cluster" + ] + }, + "DeleteAutonomousDatabase": { + "methods": [ + "delete_autonomous_database" + ] + }, + "DeleteCloudExadataInfrastructure": { + "methods": [ + "delete_cloud_exadata_infrastructure" + ] + }, + "DeleteCloudVmCluster": { + "methods": [ + "delete_cloud_vm_cluster" + ] + }, + "GenerateAutonomousDatabaseWallet": { + "methods": [ + "generate_autonomous_database_wallet" + ] + }, + "GetAutonomousDatabase": { + "methods": [ + "get_autonomous_database" + ] + }, + "GetCloudExadataInfrastructure": { + "methods": [ + "get_cloud_exadata_infrastructure" + ] + }, + "GetCloudVmCluster": { + "methods": [ + "get_cloud_vm_cluster" + ] + }, + "ListAutonomousDatabaseBackups": { + "methods": [ + "list_autonomous_database_backups" + ] + }, + "ListAutonomousDatabaseCharacterSets": { + "methods": [ + "list_autonomous_database_character_sets" + ] + }, + "ListAutonomousDatabases": { + "methods": [ + "list_autonomous_databases" + ] + }, + "ListAutonomousDbVersions": { + "methods": [ + "list_autonomous_db_versions" + ] + }, + "ListCloudExadataInfrastructures": { + "methods": [ + "list_cloud_exadata_infrastructures" + ] + }, + "ListCloudVmClusters": { + "methods": [ + "list_cloud_vm_clusters" + ] + }, + "ListDbNodes": { + "methods": [ + "list_db_nodes" + ] + }, + "ListDbServers": { + "methods": [ + "list_db_servers" + ] + }, + "ListDbSystemShapes": { + "methods": [ + "list_db_system_shapes" + ] + }, + "ListEntitlements": { + "methods": [ + "list_entitlements" + ] + }, + "ListGiVersions": { + "methods": [ + "list_gi_versions" + ] + }, + "RestoreAutonomousDatabase": { + "methods": [ + "restore_autonomous_database" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed new file mode 100644 index 000000000000..cd7e437b6f62 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-oracledatabase package uses inline types. 
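The gapic_metadata.json above maps each proto RPC to a snake_case method on the generated client (for example, ListCloudVmClusters surfaces as OracleDatabaseClient.list_cloud_vm_clusters) and registers only a REST transport for this service. A minimal usage sketch of that mapping, assuming application default credentials and a hypothetical project/location that are not part of this patch:

from google.cloud import oracledatabase_v1

# Constructs the client over the REST transport registered in gapic_metadata.json.
client = oracledatabase_v1.OracleDatabaseClient()

# "ListCloudVmClusters" maps to list_cloud_vm_clusters(); the returned pager
# lazily fetches further pages as it is iterated.
parent = "projects/my-project/locations/us-east4"  # hypothetical resource name
for vm_cluster in client.list_cloud_vm_clusters(parent=parent):
    print(vm_cluster.name)

The other List* RPCs in the mapping (list_db_servers, list_entitlements, and so on) generally follow the same parent-scoped, paginated shape.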
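Relatedly, the docs/multiprocessing.rst note added earlier in this patch recommends constructing client instances only after any fork. A short sketch of that pattern, again with hypothetical resource names and the same flattened-parameter assumption as above:

import multiprocessing

from google.cloud import oracledatabase_v1


def count_db_system_shapes(parent: str) -> int:
    # Construct the client inside the worker, i.e. after the fork performed by
    # multiprocessing, instead of sharing one client object across processes.
    client = oracledatabase_v1.OracleDatabaseClient()
    return sum(1 for _ in client.list_db_system_shapes(parent=parent))


if __name__ == "__main__":
    parents = [
        "projects/my-project/locations/us-east4",  # hypothetical
        "projects/my-project/locations/europe-west2",  # hypothetical
    ]
    with multiprocessing.Pool(processes=2) as pool:
        print(pool.map(count_db_system_shapes, parents))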
diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py new file mode 100644 index 000000000000..947b9516b5e7 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import OracleDatabaseClient + +__all__ = ("OracleDatabaseClient",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py new file mode 100644 index 000000000000..9a4182820e59 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py @@ -0,0 +1,4073 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.oracledatabase_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.oracledatabase_v1.services.oracle_database import pagers +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database + +from .transports.base import DEFAULT_CLIENT_INFO, OracleDatabaseTransport +from .transports.rest import OracleDatabaseRestTransport + + +class OracleDatabaseClientMeta(type): + """Metaclass for the OracleDatabase client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[OracleDatabaseTransport]] + _transport_registry["rest"] = OracleDatabaseRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OracleDatabaseTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class OracleDatabaseClient(metaclass=OracleDatabaseClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "oracledatabase.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "oracledatabase.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OracleDatabaseClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OracleDatabaseClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> OracleDatabaseTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            OracleDatabaseTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def autonomous_database_path(
+        project: str,
+        location: str,
+        autonomous_database: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_database string."""
+        return "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(
+            project=project,
+            location=location,
+            autonomous_database=autonomous_database,
+        )
+
+    @staticmethod
+    def parse_autonomous_database_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_database path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDatabases/(?P<autonomous_database>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def autonomous_database_backup_path(
+        project: str,
+        location: str,
+        autonomous_database_backup: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_database_backup string."""
+        return "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format(
+            project=project,
+            location=location,
+            autonomous_database_backup=autonomous_database_backup,
+        )
+
+    @staticmethod
+    def parse_autonomous_database_backup_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_database_backup path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDatabaseBackups/(?P<autonomous_database_backup>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def autonomous_database_character_set_path(
+        project: str,
+        location: str,
+        autonomous_database_character_set: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_database_character_set string."""
+        return "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format(
+            project=project,
+            location=location,
+            autonomous_database_character_set=autonomous_database_character_set,
+        )
+
+    @staticmethod
+    def parse_autonomous_database_character_set_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_database_character_set path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDatabaseCharacterSets/(?P<autonomous_database_character_set>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def autonomous_db_version_path(
+        project: str,
+        location: str,
+        autonomous_db_version: str,
+    ) -> str:
+        """Returns a fully-qualified autonomous_db_version string."""
+        return "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format(
+            project=project,
+            location=location,
+            autonomous_db_version=autonomous_db_version,
+        )
+
+    @staticmethod
+    def parse_autonomous_db_version_path(path: str) -> Dict[str, str]:
+        """Parses a autonomous_db_version path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autonomousDbVersions/(?P<autonomous_db_version>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def cloud_exadata_infrastructure_path(
+        project: str,
+        location: str,
+        cloud_exadata_infrastructure: str,
+    ) -> str:
+        """Returns a fully-qualified cloud_exadata_infrastructure string."""
+        return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format(
+            project=project,
+            location=location,
+            cloud_exadata_infrastructure=cloud_exadata_infrastructure,
+        )
+
+    @staticmethod
+    def parse_cloud_exadata_infrastructure_path(path: str) -> Dict[str, str]:
+        """Parses a cloud_exadata_infrastructure path into its component segments."""
+        m = re.match(
r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def cloud_vm_cluster_path( + project: str, + location: str, + cloud_vm_cluster: str, + ) -> str: + """Returns a fully-qualified cloud_vm_cluster string.""" + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + ) + + @staticmethod + def parse_cloud_vm_cluster_path(path: str) -> Dict[str, str]: + """Parses a cloud_vm_cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def db_node_path( + project: str, + location: str, + cloud_vm_cluster: str, + db_node: str, + ) -> str: + """Returns a fully-qualified db_node string.""" + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + db_node=db_node, + ) + + @staticmethod + def parse_db_node_path(path: str) -> Dict[str, str]: + """Parses a db_node path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)/dbNodes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def db_server_path( + project: str, + location: str, + cloud_exadata_infrastructure: str, + db_server: str, + ) -> str: + """Returns a fully-qualified db_server string.""" + return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + db_server=db_server, + ) + + @staticmethod + def parse_db_server_path(path: str) -> Dict[str, str]: + """Parses a db_server path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)/dbServers/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def db_system_shape_path( + project: str, + location: str, + db_system_shape: str, + ) -> str: + """Returns a fully-qualified db_system_shape string.""" + return "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format( + project=project, + location=location, + db_system_shape=db_system_shape, + ) + + @staticmethod + def parse_db_system_shape_path(path: str) -> Dict[str, str]: + """Parses a db_system_shape path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dbSystemShapes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def entitlement_path( + project: str, + location: str, + entitlement: str, + ) -> str: + """Returns a fully-qualified entitlement string.""" + return ( + "projects/{project}/locations/{location}/entitlements/{entitlement}".format( + project=project, + location=location, + entitlement=entitlement, + ) + ) + + @staticmethod + def parse_entitlement_path(path: str) -> Dict[str, str]: + """Parses a entitlement path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/entitlements/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def gi_version_path( + project: str, + location: str, + gi_version: str, + ) -> str: + """Returns a fully-qualified gi_version string.""" + 
return "projects/{project}/locations/{location}/giVersions/{gi_version}".format( + project=project, + location=location, + gi_version=gi_version, + ) + + @staticmethod + def parse_gi_version_path(path: str) -> Dict[str, str]: + """Parses a gi_version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/giVersions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = OracleDatabaseClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or OracleDatabaseClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, OracleDatabaseTransport, Callable[..., OracleDatabaseTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the oracle database client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OracleDatabaseTransport,Callable[..., OracleDatabaseTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OracleDatabaseTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence, and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(
+            client_options_lib.ClientOptions, self._client_options
+        )
+
+        universe_domain_opt = getattr(self._client_options, "universe_domain", None)
+
+        (
+            self._use_client_cert,
+            self._use_mtls_endpoint,
+            self._universe_domain_env,
+        ) = OracleDatabaseClient._read_environment_variables()
+        self._client_cert_source = OracleDatabaseClient._get_client_cert_source(
+            self._client_options.client_cert_source, self._use_client_cert
+        )
+        self._universe_domain = OracleDatabaseClient._get_universe_domain(
+            universe_domain_opt, self._universe_domain_env
+        )
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError(
+                "client_options.api_key and credentials are mutually exclusive"
+            )
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, OracleDatabaseTransport)
+        if transport_provided:
+            # transport is an OracleDatabaseTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError(
+                    "When providing a transport instance, "
+                    "provide its credentials directly."
+                )
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(OracleDatabaseTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or OracleDatabaseClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[OracleDatabaseTransport], Callable[..., OracleDatabaseTransport] + ] = ( + OracleDatabaseClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., OracleDatabaseTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_cloud_exadata_infrastructures( + self, + request: Optional[ + Union[oracledatabase.ListCloudExadataInfrastructuresRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudExadataInfrastructuresPager: + r"""Lists Exadata Infrastructures in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_cloud_exadata_infrastructures(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudExadataInfrastructuresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_exadata_infrastructures(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest, dict]): + The request object. The request for ``CloudExadataInfrastructures.List``. + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the + following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudExadataInfrastructuresPager: + The response for CloudExadataInfrastructures.list. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.ListCloudExadataInfrastructuresRequest + ): + request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_cloud_exadata_infrastructures + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCloudExadataInfrastructuresPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.GetCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: + r"""Gets details of a single Exadata Infrastructure. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_exadata_infrastructure(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Get``. + name (str): + Required. 
The name of the Cloud Exadata Infrastructure + in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure: + Represents CloudExadataInfrastructure + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetCloudExadataInfrastructureRequest): + request = oracledatabase.GetCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_cloud_exadata_infrastructure + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.CreateCloudExadataInfrastructureRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_exadata_infrastructure: Optional[ + exadata_infra.CloudExadataInfrastructure + ] = None, + cloud_exadata_infrastructure_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Exadata Infrastructure in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import oracledatabase_v1
+
+            def sample_create_cloud_exadata_infrastructure():
+                # Create a client
+                client = oracledatabase_v1.OracleDatabaseClient()
+
+                # Initialize request argument(s)
+                request = oracledatabase_v1.CreateCloudExadataInfrastructureRequest(
+                    parent="parent_value",
+                    cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value",
+                )
+
+                # Make the request
+                operation = client.create_cloud_exadata_infrastructure(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.oracledatabase_v1.types.CreateCloudExadataInfrastructureRequest, dict]):
+                The request object. The request for ``CloudExadataInfrastructure.Create``.
+            parent (str):
+                Required. The parent value for
+                CloudExadataInfrastructure in the
+                following format:
+                projects/{project}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            cloud_exadata_infrastructure (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure):
+                Required. Details of the Exadata
+                Infrastructure instance to create.
+
+                This corresponds to the ``cloud_exadata_infrastructure`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            cloud_exadata_infrastructure_id (str):
+                Required. The ID of the Exadata Infrastructure to
+                create. This value is restricted to
+                ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`` and must be a
+                maximum of 63 characters in length. The value must start
+                with a letter and end with a letter or a number.
+
+                This corresponds to the ``cloud_exadata_infrastructure_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure` Represents CloudExadataInfrastructure resource.
+                https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any(
+            [parent, cloud_exadata_infrastructure, cloud_exadata_infrastructure_id]
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(
+            request, oracledatabase.CreateCloudExadataInfrastructureRequest
+        ):
+            request = oracledatabase.CreateCloudExadataInfrastructureRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if parent is not None: + request.parent = parent + if cloud_exadata_infrastructure is not None: + request.cloud_exadata_infrastructure = cloud_exadata_infrastructure + if cloud_exadata_infrastructure_id is not None: + request.cloud_exadata_infrastructure_id = ( + cloud_exadata_infrastructure_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_cloud_exadata_infrastructure + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + exadata_infra.CloudExadataInfrastructure, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Exadata Infrastructure. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + name (str): + Required. The name of the Cloud Exadata Infrastructure + in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.DeleteCloudExadataInfrastructureRequest + ): + request = oracledatabase.DeleteCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_cloud_exadata_infrastructure + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_cloud_vm_clusters( + self, + request: Optional[ + Union[oracledatabase.ListCloudVmClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudVmClustersPager: + r"""Lists the VM Clusters in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_cloud_vm_clusters(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudVmClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_vm_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest, dict]): + The request object. The request for ``CloudVmCluster.List``. + parent (str): + Required. 
The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudVmClustersPager: + The response for CloudVmCluster.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListCloudVmClustersRequest): + request = oracledatabase.ListCloudVmClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_cloud_vm_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCloudVmClustersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cloud_vm_cluster( + self, + request: Optional[Union[oracledatabase.GetCloudVmClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: + r"""Gets details of a single VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_vm_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetCloudVmClusterRequest, dict]): + The request object. The request for ``CloudVmCluster.Get``. + name (str): + Required. The name of the Cloud VM Cluster in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.CloudVmCluster: + Details of the Cloud VM Cluster + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetCloudVmClusterRequest): + request = oracledatabase.GetCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.CreateCloudVmClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_vm_cluster: Optional[vm_cluster.CloudVmCluster] = None, + cloud_vm_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new VM Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import oracledatabase_v1
+
+            def sample_create_cloud_vm_cluster():
+                # Create a client
+                client = oracledatabase_v1.OracleDatabaseClient()
+
+                # Initialize request argument(s)
+                cloud_vm_cluster = oracledatabase_v1.CloudVmCluster()
+                cloud_vm_cluster.exadata_infrastructure = "exadata_infrastructure_value"
+                cloud_vm_cluster.cidr = "cidr_value"
+                cloud_vm_cluster.backup_subnet_cidr = "backup_subnet_cidr_value"
+                cloud_vm_cluster.network = "network_value"
+
+                request = oracledatabase_v1.CreateCloudVmClusterRequest(
+                    parent="parent_value",
+                    cloud_vm_cluster_id="cloud_vm_cluster_id_value",
+                    cloud_vm_cluster=cloud_vm_cluster,
+                )
+
+                # Make the request
+                operation = client.create_cloud_vm_cluster(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.oracledatabase_v1.types.CreateCloudVmClusterRequest, dict]):
+                The request object. The request for ``CloudVmCluster.Create``.
+            parent (str):
+                Required. The name of the parent in
+                the following format:
+                projects/{project}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            cloud_vm_cluster (google.cloud.oracledatabase_v1.types.CloudVmCluster):
+                Required. The resource being created.
+                This corresponds to the ``cloud_vm_cluster`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            cloud_vm_cluster_id (str):
+                Required. The ID of the VM Cluster to create. This value
+                is restricted to ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``
+                and must be a maximum of 63 characters in length. The
+                value must start with a letter and end with a letter or
+                a number.
+
+                This corresponds to the ``cloud_vm_cluster_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.CloudVmCluster` Details of the Cloud VM Cluster resource.
+                https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, cloud_vm_cluster, cloud_vm_cluster_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+ if not isinstance(request, oracledatabase.CreateCloudVmClusterRequest): + request = oracledatabase.CreateCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cloud_vm_cluster is not None: + request.cloud_vm_cluster = cloud_vm_cluster + if cloud_vm_cluster_id is not None: + request.cloud_vm_cluster_id = cloud_vm_cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vm_cluster.CloudVmCluster, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudVmClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteCloudVmClusterRequest, dict]): + The request object. The request for ``CloudVmCluster.Delete``. + name (str): + Required. The name of the Cloud VM Cluster in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.DeleteCloudVmClusterRequest): + request = oracledatabase.DeleteCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_entitlements( + self, + request: Optional[Union[oracledatabase.ListEntitlementsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitlementsPager: + r"""Lists the entitlements in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_entitlements(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListEntitlementsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entitlements(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListEntitlementsRequest, dict]): + The request object. The request for ``Entitlement.List``. + parent (str): + Required. The parent value for the + entitlement in the following format: + projects/{project}/locations/{location}. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListEntitlementsPager: + The response for Entitlement.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListEntitlementsRequest): + request = oracledatabase.ListEntitlementsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entitlements] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntitlementsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_db_servers( + self, + request: Optional[Union[oracledatabase.ListDbServersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbServersPager: + r"""Lists the database servers of an Exadata + Infrastructure instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_servers(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_servers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbServersRequest, dict]): + The request object. The request for ``DbServer.List``. + parent (str): + Required. The parent value for + database server in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloudExadataInfrastructure}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbServersPager: + The response for DbServer.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbServersRequest): + request = oracledatabase.ListDbServersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_servers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbServersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
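+ # NOTE: iterating the returned ListDbServersPager yields DbServer items and
+ # issues follow-up ListDbServers calls lazily as each page is exhausted;
+ # iterate the pager's `pages` property instead to work with the raw
+ # ListDbServersResponse messages (and their `next_page_token`) directly.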
+ return response + + def list_db_nodes( + self, + request: Optional[Union[oracledatabase.ListDbNodesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbNodesPager: + r"""Lists the database nodes of a VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_nodes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbNodesRequest, dict]): + The request object. The request for ``DbNode.List``. + parent (str): + Required. The parent value for + database node in the following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloudVmCluster}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbNodesPager: + The response for DbNode.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbNodesRequest): + request = oracledatabase.ListDbNodesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_nodes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
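+ # NOTE: leaving `retry` and `timeout` at gapic_v1.method.DEFAULT applies the
+ # defaults configured on the wrapped method above; pass an explicit
+ # google.api_core.retry.Retry or a float number of seconds to override them.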
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbNodesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_gi_versions( + self, + request: Optional[Union[oracledatabase.ListGiVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGiVersionsPager: + r"""Lists all the valid Oracle Grid Infrastructure (GI) + versions for the given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_gi_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListGiVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gi_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListGiVersionsRequest, dict]): + The request object. The request for ``GiVersion.List``. + parent (str): + Required. The parent value for Grid + Infrastructure Version in the following + format: Format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListGiVersionsPager: + The response for GiVersion.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListGiVersionsRequest): + request = oracledatabase.ListGiVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
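+ # NOTE: the wrapped method also attaches the client library metadata
+ # (the x-goog-api-client header) supplied via client_info when the
+ # transport was created.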
+ rpc = self._transport._wrapped_methods[self._transport.list_gi_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGiVersionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_db_system_shapes( + self, + request: Optional[Union[oracledatabase.ListDbSystemShapesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbSystemShapesPager: + r"""Lists the database system shapes available for the + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_system_shapes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbSystemShapesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_system_shapes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest, dict]): + The request object. The request for ``DbSystemShape.List``. + parent (str): + Required. The parent value for + Database System Shapes in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbSystemShapesPager: + The response for DbSystemShape.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
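+ # NOTE: a plain dict is accepted as well; proto-plus coerces it into a
+ # ListDbSystemShapesRequest, so passing
+ # request={"parent": "projects/{project}/locations/{location}"} is
+ # equivalent to constructing the request object explicitly.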
+ if not isinstance(request, oracledatabase.ListDbSystemShapesRequest): + request = oracledatabase.ListDbSystemShapesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_system_shapes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbSystemShapesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_databases( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabasesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabasesPager: + r"""Lists the Autonomous Databases in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_databases(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest, dict]): + The request object. The request for ``AutonomousDatabase.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabasesPager: + The response for AutonomousDatabase.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDatabasesRequest): + request = oracledatabase.ListAutonomousDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_databases + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabasesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.GetAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: + r"""Gets the details of a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_autonomous_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Get``. + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.AutonomousDatabase: + Details of the Autonomous Database + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetAutonomousDatabaseRequest): + request = oracledatabase.GetAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_autonomous_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.CreateAutonomousDatabaseRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + autonomous_database: Optional[ + gco_autonomous_database.AutonomousDatabase + ] = None, + autonomous_database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Autonomous Database in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_create_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + autonomous_database = oracledatabase_v1.AutonomousDatabase() + autonomous_database.network = "network_value" + autonomous_database.cidr = "cidr_value" + + request = oracledatabase_v1.CreateAutonomousDatabaseRequest( + parent="parent_value", + autonomous_database_id="autonomous_database_id_value", + autonomous_database=autonomous_database, + ) + + # Make the request + operation = client.create_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.CreateAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Create``. + parent (str): + Required. The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autonomous_database (google.cloud.oracledatabase_v1.types.AutonomousDatabase): + Required. The Autonomous Database + being created. + + This corresponds to the ``autonomous_database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autonomous_database_id (str): + Required. The ID of the Autonomous Database to create. + This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a + maximum of 63 characters in length. The value must start + with a letter and end with a letter or a number. + + This corresponds to the ``autonomous_database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.AutonomousDatabase` Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, autonomous_database, autonomous_database_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.CreateAutonomousDatabaseRequest): + request = oracledatabase.CreateAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
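+ # NOTE: assigning `autonomous_database` below copies the message into the
+ # request, so mutating the caller's AutonomousDatabase object afterwards
+ # does not change the RPC that is sent.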
+ if parent is not None: + request.parent = parent + if autonomous_database is not None: + request.autonomous_database = autonomous_database + if autonomous_database_id is not None: + request.autonomous_database_id = autonomous_database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_autonomous_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gco_autonomous_database.AutonomousDatabase, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.DeleteAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Delete``. + name (str): + Required. The name of the resource in the following + format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.DeleteAutonomousDatabaseRequest): + request = oracledatabase.DeleteAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_autonomous_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def restore_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.RestoreAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + restore_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restores a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_restore_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.RestoreAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.restore_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.RestoreAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Restore``. + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + restore_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time and date to + restore the database to. + + This corresponds to the ``restore_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.AutonomousDatabase` Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, restore_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.RestoreAutonomousDatabaseRequest): + request = oracledatabase.RestoreAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if restore_time is not None: + request.restore_time = restore_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.restore_autonomous_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + autonomous_database.AutonomousDatabase, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_autonomous_database_wallet( + self, + request: Optional[ + Union[oracledatabase.GenerateAutonomousDatabaseWalletRequest, dict] + ] = None, + *, + name: Optional[str] = None, + type_: Optional[autonomous_database.GenerateType] = None, + is_regional: Optional[bool] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Generates a wallet for an Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_generate_autonomous_database_wallet(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GenerateAutonomousDatabaseWalletRequest( + name="name_value", + password="password_value", + ) + + # Make the request + response = client.generate_autonomous_database_wallet(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletRequest, dict]): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + type_ (google.cloud.oracledatabase_v1.types.GenerateType): + Optional. The type of wallet + generation for the Autonomous Database. + The default value is SINGLE. + + This corresponds to the ``type_`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + is_regional (bool): + Optional. True when requesting + regional connection strings in PDB + connect info, applicable to cross-region + Data Guard only. + + This corresponds to the ``is_regional`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + password (str): + Required. The password used to + encrypt the keys inside the wallet. The + password must be a minimum of 8 + characters. + + This corresponds to the ``password`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletResponse: + The response for AutonomousDatabase.GenerateWallet. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, type_, is_regional, password]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.GenerateAutonomousDatabaseWalletRequest + ): + request = oracledatabase.GenerateAutonomousDatabaseWalletRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
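+ # NOTE: the flattened `type_` argument carries a trailing underscore only
+ # to avoid shadowing the Python built-in `type`; it is assigned to the
+ # request's `type_` field below.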
+ if name is not None: + request.name = name + if type_ is not None: + request.type_ = type_ + if is_regional is not None: + request.is_regional = is_regional + if password is not None: + request.password = password + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.generate_autonomous_database_wallet + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_db_versions( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDbVersionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDbVersionsPager: + r"""Lists all the available Autonomous Database versions + for a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_db_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDbVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_db_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest, dict]): + The request object. The request for ``AutonomousDbVersion.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDbVersionsPager: + The response for AutonomousDbVersion.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDbVersionsRequest): + request = oracledatabase.ListAutonomousDbVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_db_versions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDbVersionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_database_character_sets( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseCharacterSetsPager: + r"""Lists Autonomous Database Character Sets in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_database_character_sets(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseCharacterSetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_character_sets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest, dict]): + The request object. The request for ``AutonomousDatabaseCharacterSet.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseCharacterSetsPager: + The response for AutonomousDatabaseCharacterSet.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, oracledatabase.ListAutonomousDatabaseCharacterSetsRequest + ): + request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_character_sets + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabaseCharacterSetsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_database_backups( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseBackupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseBackupsPager: + r"""Lists the long-term and automatic backups of an + Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_database_backups(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest, dict]): + The request object. 
The request for ``AutonomousDatabaseBackup.List``. + parent (str): + Required. The parent value for + ListAutonomousDatabaseBackups in the + following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseBackupsPager: + The response for AutonomousDatabaseBackup.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDatabaseBackupsRequest): + request = oracledatabase.ListAutonomousDatabaseBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_backups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabaseBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OracleDatabaseClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
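+
+        Example (an illustrative sketch added for clarity; the project,
+        location, and operation ID below are placeholders, not values
+        defined by this library):
+
+        .. code-block:: python
+
+            # The request may also be given as a plain dict; it is coerced
+            # into a DeleteOperationRequest via keyword expansion.
+            client.delete_operation(
+                request={
+                    # Placeholder resource name.
+                    "name": "projects/my-project/locations/us-east4/operations/sample-operation"
+                }
+            )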
+ + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OracleDatabaseClient",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py new file mode 100644 index 000000000000..111ec9a9d392 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py @@ -0,0 +1,876 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) + + +class ListCloudExadataInfrastructuresPager: + """A pager for iterating through ``list_cloud_exadata_infrastructures`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cloud_exadata_infrastructures`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCloudExadataInfrastructures`` requests and continue to iterate + through the ``cloud_exadata_infrastructures`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudExadataInfrastructuresResponse], + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + response: oracledatabase.ListCloudExadataInfrastructuresResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListCloudExadataInfrastructuresResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[exadata_infra.CloudExadataInfrastructure]: + for page in self.pages: + yield from page.cloud_exadata_infrastructures + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCloudVmClustersPager: + """A pager for iterating through ``list_cloud_vm_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cloud_vm_clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCloudVmClusters`` requests and continue to iterate + through the ``cloud_vm_clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudVmClustersResponse], + request: oracledatabase.ListCloudVmClustersRequest, + response: oracledatabase.ListCloudVmClustersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
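+
+        The snippet below is an illustrative sketch (an editorial addition,
+        not generated code); it assumes an ``OracleDatabaseClient`` named
+        ``client`` and uses a placeholder parent resource:
+
+        .. code-block:: python
+
+            # "my-project" and "us-east4" are placeholder values.
+            pager = client.list_cloud_vm_clusters(
+                request={"parent": "projects/my-project/locations/us-east4"}
+            )
+
+            # Iterating the pager yields CloudVmCluster items and fetches
+            # additional pages transparently as they are needed.
+            for cloud_vm_cluster in pager:
+                print(cloud_vm_cluster)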
+ """ + self._method = method + self._request = oracledatabase.ListCloudVmClustersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListCloudVmClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[vm_cluster.CloudVmCluster]: + for page in self.pages: + yield from page.cloud_vm_clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEntitlementsPager: + """A pager for iterating through ``list_entitlements`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListEntitlementsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entitlements`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntitlements`` requests and continue to iterate + through the ``entitlements`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListEntitlementsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListEntitlementsResponse], + request: oracledatabase.ListEntitlementsRequest, + response: oracledatabase.ListEntitlementsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListEntitlementsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListEntitlementsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListEntitlementsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListEntitlementsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[entitlement.Entitlement]: + for page in self.pages: + yield from page.entitlements + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDbServersPager: + """A pager for iterating through ``list_db_servers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbServersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_servers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbServers`` requests and continue to iterate + through the ``db_servers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbServersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbServersResponse], + request: oracledatabase.ListDbServersRequest, + response: oracledatabase.ListDbServersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbServersRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbServersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListDbServersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbServersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[db_server.DbServer]: + for page in self.pages: + yield from page.db_servers + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDbNodesPager: + """A pager for iterating through ``list_db_nodes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbNodesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_nodes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbNodes`` requests and continue to iterate + through the ``db_nodes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbNodesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbNodesResponse], + request: oracledatabase.ListDbNodesRequest, + response: oracledatabase.ListDbNodesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbNodesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbNodesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListDbNodesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbNodesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[db_node.DbNode]: + for page in self.pages: + yield from page.db_nodes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGiVersionsPager: + """A pager for iterating through ``list_gi_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListGiVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``gi_versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGiVersions`` requests and continue to iterate + through the ``gi_versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListGiVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListGiVersionsResponse], + request: oracledatabase.ListGiVersionsRequest, + response: oracledatabase.ListGiVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListGiVersionsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListGiVersionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListGiVersionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListGiVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[gi_version.GiVersion]: + for page in self.pages: + yield from page.gi_versions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDbSystemShapesPager: + """A pager for iterating through ``list_db_system_shapes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_system_shapes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbSystemShapes`` requests and continue to iterate + through the ``db_system_shapes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbSystemShapesResponse], + request: oracledatabase.ListDbSystemShapesRequest, + response: oracledatabase.ListDbSystemShapesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListDbSystemShapesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbSystemShapesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[db_system_shape.DbSystemShape]: + for page in self.pages: + yield from page.db_system_shapes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabasesPager: + """A pager for iterating through ``list_autonomous_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabases`` requests and continue to iterate + through the ``autonomous_databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabasesResponse], + request: oracledatabase.ListAutonomousDatabasesRequest, + response: oracledatabase.ListAutonomousDatabasesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabasesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autonomous_database.AutonomousDatabase]: + for page in self.pages: + yield from page.autonomous_databases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDbVersionsPager: + """A pager for iterating through ``list_autonomous_db_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_db_versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDbVersions`` requests and continue to iterate + through the ``autonomous_db_versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDbVersionsResponse], + request: oracledatabase.ListAutonomousDbVersionsRequest, + response: oracledatabase.ListAutonomousDbVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDbVersionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDbVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autonomous_db_version.AutonomousDbVersion]: + for page in self.pages: + yield from page.autonomous_db_versions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabaseCharacterSetsPager: + """A pager for iterating through ``list_autonomous_database_character_sets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_database_character_sets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabaseCharacterSets`` requests and continue to iterate + through the ``autonomous_database_character_sets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ], + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__( + self, + ) -> Iterator[autonomous_database_character_set.AutonomousDatabaseCharacterSet]: + for page in self.pages: + yield from page.autonomous_database_character_sets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabaseBackupsPager: + """A pager for iterating through ``list_autonomous_database_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_database_backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabaseBackups`` requests and continue to iterate + through the ``autonomous_database_backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabaseBackupsResponse], + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + response: oracledatabase.ListAutonomousDatabaseBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabaseBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[autonomous_db_backup.AutonomousDatabaseBackup]: + for page in self.pages: + yield from page.autonomous_database_backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py new file mode 100644 index 000000000000..91a06d71780e --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OracleDatabaseTransport +from .rest import OracleDatabaseRestInterceptor, OracleDatabaseRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[OracleDatabaseTransport]] +_transport_registry["rest"] = OracleDatabaseRestTransport + +__all__ = ( + "OracleDatabaseTransport", + "OracleDatabaseRestTransport", + "OracleDatabaseRestInterceptor", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py new file mode 100644 index 000000000000..ced22db4e8d6 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py @@ -0,0 +1,731 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.oracledatabase_v1 import gapic_version as package_version +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class OracleDatabaseTransport(abc.ABC): + """Abstract transport class for OracleDatabase.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "oracledatabase.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'oracledatabase.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_cloud_exadata_infrastructures: gapic_v1.method.wrap_method( + self.list_cloud_exadata_infrastructures, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cloud_exadata_infrastructure: gapic_v1.method.wrap_method( + self.get_cloud_exadata_infrastructure, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cloud_exadata_infrastructure: gapic_v1.method.wrap_method( + self.create_cloud_exadata_infrastructure, + default_timeout=None, + client_info=client_info, + ), + self.delete_cloud_exadata_infrastructure: gapic_v1.method.wrap_method( + self.delete_cloud_exadata_infrastructure, + default_timeout=None, + client_info=client_info, + ), + self.list_cloud_vm_clusters: gapic_v1.method.wrap_method( + self.list_cloud_vm_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cloud_vm_cluster: gapic_v1.method.wrap_method( + self.get_cloud_vm_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cloud_vm_cluster: gapic_v1.method.wrap_method( + self.create_cloud_vm_cluster, + default_timeout=None, + client_info=client_info, + ), + self.delete_cloud_vm_cluster: gapic_v1.method.wrap_method( + self.delete_cloud_vm_cluster, + default_timeout=None, + client_info=client_info, + ), + self.list_entitlements: gapic_v1.method.wrap_method( + self.list_entitlements, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_db_servers: gapic_v1.method.wrap_method( + self.list_db_servers, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_db_nodes: gapic_v1.method.wrap_method( + self.list_db_nodes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_gi_versions: gapic_v1.method.wrap_method( + self.list_gi_versions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_db_system_shapes: gapic_v1.method.wrap_method( + self.list_db_system_shapes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_autonomous_databases: gapic_v1.method.wrap_method( + self.list_autonomous_databases, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_autonomous_database: gapic_v1.method.wrap_method( + self.get_autonomous_database, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_autonomous_database: gapic_v1.method.wrap_method( + self.create_autonomous_database, + default_timeout=None, + client_info=client_info, + ), + self.delete_autonomous_database: gapic_v1.method.wrap_method( + self.delete_autonomous_database, + default_timeout=None, + client_info=client_info, + ), + self.restore_autonomous_database: gapic_v1.method.wrap_method( + self.restore_autonomous_database, + default_timeout=None, + client_info=client_info, + ), + self.generate_autonomous_database_wallet: gapic_v1.method.wrap_method( + self.generate_autonomous_database_wallet, + default_timeout=None, + client_info=client_info, + ), + self.list_autonomous_db_versions: gapic_v1.method.wrap_method( + 
self.list_autonomous_db_versions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_autonomous_database_character_sets: gapic_v1.method.wrap_method( + self.list_autonomous_database_character_sets, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_autonomous_database_backups: gapic_v1.method.wrap_method( + self.list_autonomous_database_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + Union[ + oracledatabase.ListCloudExadataInfrastructuresResponse, + Awaitable[oracledatabase.ListCloudExadataInfrastructuresResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + Union[ + exadata_infra.CloudExadataInfrastructure, + Awaitable[exadata_infra.CloudExadataInfrastructure], + ], + ]: + raise NotImplementedError() + + @property + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + Union[ + oracledatabase.ListCloudVmClustersResponse, + Awaitable[oracledatabase.ListCloudVmClustersResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.GetCloudVmClusterRequest], + Union[vm_cluster.CloudVmCluster, Awaitable[vm_cluster.CloudVmCluster]], + ]: + raise NotImplementedError() + + @property + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def 
delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_entitlements( + self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + Union[ + oracledatabase.ListEntitlementsResponse, + Awaitable[oracledatabase.ListEntitlementsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], + Union[ + oracledatabase.ListDbServersResponse, + Awaitable[oracledatabase.ListDbServersResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], + Union[ + oracledatabase.ListDbNodesResponse, + Awaitable[oracledatabase.ListDbNodesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], + Union[ + oracledatabase.ListGiVersionsResponse, + Awaitable[oracledatabase.ListGiVersionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + Union[ + oracledatabase.ListDbSystemShapesResponse, + Awaitable[oracledatabase.ListDbSystemShapesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + Union[ + oracledatabase.ListAutonomousDatabasesResponse, + Awaitable[oracledatabase.ListAutonomousDatabasesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + Union[ + autonomous_database.AutonomousDatabase, + Awaitable[autonomous_database.AutonomousDatabase], + ], + ]: + raise NotImplementedError() + + @property + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.RestoreAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + Union[ + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + Awaitable[oracledatabase.GenerateAutonomousDatabaseWalletResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + Union[ + oracledatabase.ListAutonomousDbVersionsResponse, + Awaitable[oracledatabase.ListAutonomousDbVersionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + 
Awaitable[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseBackupsResponse, + Awaitable[oracledatabase.ListAutonomousDatabaseBackupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("OracleDatabaseTransport",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py new file mode 100644 index 000000000000..ad8d2e4a9c29 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py @@ -0,0 +1,3718 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
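Note on the retry defaults above: the abstract transport wraps each list RPC with the same default policy (exponential backoff starting at 1.0s, capped at 10.0s, growing by a factor of 1.3, a 60-second overall deadline, and retries on DeadlineExceeded, InternalServerError, ResourceExhausted, and ServiceUnavailable). As a minimal illustrative sketch only, not part of the generated patch, the snippet below rebuilds that policy with google.api_core so an equivalent object could be supplied per call; the client name comes from this patch, while the per-call usage in the trailing comment is an assumption.

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

# Rebuild the default retry policy that the generated transport applies to its list methods.
list_retry = retries.Retry(
    initial=1.0,      # seconds before the first retry
    maximum=10.0,     # cap on the backoff delay
    multiplier=1.3,   # exponential backoff factor
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.InternalServerError,
        core_exceptions.ResourceExhausted,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,    # overall retry budget, mirroring default_timeout=60.0 above
)

# Hypothetical per-call usage (GAPIC-generated methods accept `retry` and `timeout` kwargs):
# from google.cloud import oracledatabase_v1
# client = oracledatabase_v1.OracleDatabaseClient()
# client.list_db_servers(request=..., retry=list_retry, timeout=60.0)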
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OracleDatabaseTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class OracleDatabaseRestInterceptor: + """Interceptor for OracleDatabase. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OracleDatabaseRestTransport. + + .. 
code-block:: python + class MyCustomOracleDatabaseInterceptor(OracleDatabaseRestInterceptor): + def pre_create_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_autonomous_database_wallet(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_autonomous_database_wallet(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_database_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_database_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_database_character_sets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_database_character_sets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_databases(self, response): + logging.log(f"Received response: {response}") + return 
response + + def pre_list_autonomous_db_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_db_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cloud_exadata_infrastructures(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cloud_exadata_infrastructures(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cloud_vm_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cloud_vm_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_nodes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_nodes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_servers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_servers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_system_shapes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_system_shapes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entitlements(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entitlements(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_gi_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_gi_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OracleDatabaseRestTransport(interceptor=MyCustomOracleDatabaseInterceptor()) + client = OracleDatabaseClient(transport=transport) + + + """ + + def pre_create_autonomous_database( + self, + request: oracledatabase.CreateAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_create_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_create_cloud_exadata_infrastructure( + self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_create_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_create_cloud_vm_cluster( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.CreateCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_create_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_delete_autonomous_database( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_delete_cloud_exadata_infrastructure( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_cloud_vm_cluster( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.DeleteCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_generate_autonomous_database_wallet( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for generate_autonomous_database_wallet + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_generate_autonomous_database_wallet( + self, response: oracledatabase.GenerateAutonomousDatabaseWalletResponse + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + """Post-rpc interceptor for generate_autonomous_database_wallet + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_autonomous_database( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_autonomous_database( + self, response: autonomous_database.AutonomousDatabase + ) -> autonomous_database.AutonomousDatabase: + """Post-rpc interceptor for get_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_cloud_exadata_infrastructure( + self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GetCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_cloud_exadata_infrastructure( + self, response: exadata_infra.CloudExadataInfrastructure + ) -> exadata_infra.CloudExadataInfrastructure: + """Post-rpc interceptor for get_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_get_cloud_vm_cluster( + self, + request: oracledatabase.GetCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_cloud_vm_cluster( + self, response: vm_cluster.CloudVmCluster + ) -> vm_cluster.CloudVmCluster: + """Post-rpc interceptor for get_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_autonomous_database_backups( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseBackupsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_autonomous_database_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_database_backups( + self, response: oracledatabase.ListAutonomousDatabaseBackupsResponse + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + """Post-rpc interceptor for list_autonomous_database_backups + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_autonomous_database_character_sets( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_autonomous_database_character_sets + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_database_character_sets( + self, response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + """Post-rpc interceptor for list_autonomous_database_character_sets + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_autonomous_databases( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabasesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_autonomous_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_databases( + self, response: oracledatabase.ListAutonomousDatabasesResponse + ) -> oracledatabase.ListAutonomousDatabasesResponse: + """Post-rpc interceptor for list_autonomous_databases + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_list_autonomous_db_versions( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDbVersionsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_autonomous_db_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_db_versions( + self, response: oracledatabase.ListAutonomousDbVersionsResponse + ) -> oracledatabase.ListAutonomousDbVersionsResponse: + """Post-rpc interceptor for list_autonomous_db_versions + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_cloud_exadata_infrastructures( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListCloudExadataInfrastructuresRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_cloud_exadata_infrastructures + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_cloud_exadata_infrastructures( + self, response: oracledatabase.ListCloudExadataInfrastructuresResponse + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + """Post-rpc interceptor for list_cloud_exadata_infrastructures + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_cloud_vm_clusters( + self, + request: oracledatabase.ListCloudVmClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListCloudVmClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_cloud_vm_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_cloud_vm_clusters( + self, response: oracledatabase.ListCloudVmClustersResponse + ) -> oracledatabase.ListCloudVmClustersResponse: + """Post-rpc interceptor for list_cloud_vm_clusters + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_db_nodes( + self, + request: oracledatabase.ListDbNodesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbNodesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_nodes + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_db_nodes( + self, response: oracledatabase.ListDbNodesResponse + ) -> oracledatabase.ListDbNodesResponse: + """Post-rpc interceptor for list_db_nodes + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + + def pre_list_db_servers( + self, + request: oracledatabase.ListDbServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_servers + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_db_servers( + self, response: oracledatabase.ListDbServersResponse + ) -> oracledatabase.ListDbServersResponse: + """Post-rpc interceptor for list_db_servers + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_db_system_shapes( + self, + request: oracledatabase.ListDbSystemShapesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbSystemShapesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_system_shapes + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_db_system_shapes( + self, response: oracledatabase.ListDbSystemShapesResponse + ) -> oracledatabase.ListDbSystemShapesResponse: + """Post-rpc interceptor for list_db_system_shapes + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_entitlements( + self, + request: oracledatabase.ListEntitlementsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListEntitlementsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_entitlements + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_entitlements( + self, response: oracledatabase.ListEntitlementsResponse + ) -> oracledatabase.ListEntitlementsResponse: + """Post-rpc interceptor for list_entitlements + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_gi_versions( + self, + request: oracledatabase.ListGiVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListGiVersionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_gi_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_gi_versions( + self, response: oracledatabase.ListGiVersionsResponse + ) -> oracledatabase.ListGiVersionsResponse: + """Post-rpc interceptor for list_gi_versions + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_restore_autonomous_database( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.RestoreAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for restore_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_restore_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OracleDatabaseRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OracleDatabaseRestInterceptor + + +class OracleDatabaseRestTransport(OracleDatabaseTransport): + """REST backend transport for OracleDatabase. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "oracledatabase.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[OracleDatabaseRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'oracledatabase.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or OracleDatabaseRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("CreateAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "autonomousDatabaseId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.CreateAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.CreateAutonomousDatabaseRequest): + The request object. 
The request for ``AutonomousDatabase.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases", + "body": "autonomous_database", + }, + ] + request, metadata = self._interceptor.pre_create_autonomous_database( + request, metadata + ) + pb_request = oracledatabase.CreateAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_autonomous_database(resp) + return resp + + class _CreateCloudExadataInfrastructure(OracleDatabaseRestStub): + def __hash__(self): + return hash("CreateCloudExadataInfrastructure") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudExadataInfrastructureId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.CreateCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + "body": "cloud_exadata_infrastructure", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_create_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cloud_exadata_infrastructure(resp) + return resp + + class _CreateCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("CreateCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudVmClusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.CreateCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + "body": "cloud_vm_cluster", + }, + ] + request, metadata = self._interceptor.pre_create_cloud_vm_cluster( + request, metadata + ) + pb_request = oracledatabase.CreateCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cloud_vm_cluster(resp) + return resp + + class _DeleteAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.DeleteAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_autonomous_database( + request, metadata + ) + pb_request = oracledatabase.DeleteAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_autonomous_database(resp) + return resp + + class _DeleteCloudExadataInfrastructure(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteCloudExadataInfrastructure") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_delete_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cloud_exadata_infrastructure(resp) + return resp + + class _DeleteCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_cloud_vm_cluster( + request, metadata + ) + pb_request = oracledatabase.DeleteCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cloud_vm_cluster(resp) + return resp + + class _GenerateAutonomousDatabaseWallet(OracleDatabaseRestStub): + def __hash__(self): + return hash("GenerateAutonomousDatabaseWallet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Call the generate autonomous + database wallet method over HTTP. + + Args: + request (~.oracledatabase.GenerateAutonomousDatabaseWalletRequest): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.GenerateAutonomousDatabaseWalletResponse: + The response for ``AutonomousDatabase.GenerateWallet``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_generate_autonomous_database_wallet( + request, metadata + ) + pb_request = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + pb_resp = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_autonomous_database_wallet(resp) + return resp + + class _GetAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: + r"""Call the get autonomous database method over HTTP. + + Args: + request (~.oracledatabase.GetAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autonomous_database.AutonomousDatabase: + Details of the Autonomous Database + resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}", + }, + ] + request, metadata = self._interceptor.pre_get_autonomous_database( + request, metadata + ) + pb_request = oracledatabase.GetAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autonomous_database.AutonomousDatabase() + pb_resp = autonomous_database.AutonomousDatabase.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_autonomous_database(resp) + return resp + + class _GetCloudExadataInfrastructure(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetCloudExadataInfrastructure") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: + r"""Call the get cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.GetCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.exadata_infra.CloudExadataInfrastructure: + Represents CloudExadataInfrastructure + resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.GetCloudExadataInfrastructureRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = exadata_infra.CloudExadataInfrastructure() + pb_resp = exadata_infra.CloudExadataInfrastructure.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cloud_exadata_infrastructure(resp) + return resp + + class _GetCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: + r"""Call the get cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.GetCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vm_cluster.CloudVmCluster: + Details of the Cloud VM Cluster + resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cloud_vm_cluster( + request, metadata + ) + pb_request = oracledatabase.GetCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vm_cluster.CloudVmCluster() + pb_resp = vm_cluster.CloudVmCluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cloud_vm_cluster(resp) + return resp + + class _ListAutonomousDatabaseBackups(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabaseBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + r"""Call the list autonomous database + backups method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseBackupsRequest): + The request object. The request for ``AutonomousDatabaseBackup.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseBackupsResponse: + The response for ``AutonomousDatabaseBackup.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups", + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_database_backups( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabaseBackupsResponse() + pb_resp = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_database_backups(resp) + return resp + + class _ListAutonomousDatabaseCharacterSets(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabaseCharacterSets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + r"""Call the list autonomous database + character sets method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + The request object. The request for ``AutonomousDatabaseCharacterSet.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + The response for + ``AutonomousDatabaseCharacterSet.List``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_list_autonomous_database_character_sets( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + pb_resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_database_character_sets(resp) + return resp + + class _ListAutonomousDatabases(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabasesResponse: + r"""Call the list autonomous databases method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabasesRequest): + The request object. The request for ``AutonomousDatabase.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabasesResponse: + The response for ``AutonomousDatabase.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases", + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_databases( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabasesResponse() + pb_resp = oracledatabase.ListAutonomousDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_databases(resp) + return resp + + class _ListAutonomousDbVersions(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDbVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDbVersionsResponse: + r"""Call the list autonomous db + versions method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDbVersionsRequest): + The request object. The request for ``AutonomousDbVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDbVersionsResponse: + The response for ``AutonomousDbVersion.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDbVersions", + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_db_versions( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDbVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDbVersionsResponse() + pb_resp = oracledatabase.ListAutonomousDbVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_db_versions(resp) + return resp + + class _ListCloudExadataInfrastructures(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListCloudExadataInfrastructures") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + r"""Call the list cloud exadata + infrastructures method over HTTP. + + Args: + request (~.oracledatabase.ListCloudExadataInfrastructuresRequest): + The request object. The request for ``CloudExadataInfrastructures.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudExadataInfrastructuresResponse: + The response for ``CloudExadataInfrastructures.list``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_list_cloud_exadata_infrastructures( + request, metadata + ) + pb_request = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListCloudExadataInfrastructuresResponse() + pb_resp = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cloud_exadata_infrastructures(resp) + return resp + + class _ListCloudVmClusters(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListCloudVmClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListCloudVmClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudVmClustersResponse: + r"""Call the list cloud vm clusters method over HTTP. + + Args: + request (~.oracledatabase.ListCloudVmClustersRequest): + The request object. The request for ``CloudVmCluster.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudVmClustersResponse: + The response for ``CloudVmCluster.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + }, + ] + request, metadata = self._interceptor.pre_list_cloud_vm_clusters( + request, metadata + ) + pb_request = oracledatabase.ListCloudVmClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListCloudVmClustersResponse() + pb_resp = oracledatabase.ListCloudVmClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cloud_vm_clusters(resp) + return resp + + class _ListDbNodes(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbNodes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbNodesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbNodesResponse: + r"""Call the list db nodes method over HTTP. + + Args: + request (~.oracledatabase.ListDbNodesRequest): + The request object. The request for ``DbNode.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbNodesResponse: + The response for ``DbNode.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes", + }, + ] + request, metadata = self._interceptor.pre_list_db_nodes(request, metadata) + pb_request = oracledatabase.ListDbNodesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbNodesResponse() + pb_resp = oracledatabase.ListDbNodesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_nodes(resp) + return resp + + class _ListDbServers(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbServers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbServersResponse: + r"""Call the list db servers method over HTTP. + + Args: + request (~.oracledatabase.ListDbServersRequest): + The request object. The request for ``DbServer.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbServersResponse: + The response for ``DbServer.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers", + }, + ] + request, metadata = self._interceptor.pre_list_db_servers(request, metadata) + pb_request = oracledatabase.ListDbServersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbServersResponse() + pb_resp = oracledatabase.ListDbServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_servers(resp) + return resp + + class _ListDbSystemShapes(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbSystemShapes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbSystemShapesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbSystemShapesResponse: + r"""Call the list db system shapes method over HTTP. + + Args: + request (~.oracledatabase.ListDbSystemShapesRequest): + The request object. The request for ``DbSystemShape.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbSystemShapesResponse: + The response for ``DbSystemShape.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dbSystemShapes", + }, + ] + request, metadata = self._interceptor.pre_list_db_system_shapes( + request, metadata + ) + pb_request = oracledatabase.ListDbSystemShapesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbSystemShapesResponse() + pb_resp = oracledatabase.ListDbSystemShapesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_system_shapes(resp) + return resp + + class _ListEntitlements(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListEntitlements") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListEntitlementsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListEntitlementsResponse: + r"""Call the list entitlements method over HTTP. + + Args: + request (~.oracledatabase.ListEntitlementsRequest): + The request object. The request for ``Entitlement.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListEntitlementsResponse: + The response for ``Entitlement.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/entitlements", + }, + ] + request, metadata = self._interceptor.pre_list_entitlements( + request, metadata + ) + pb_request = oracledatabase.ListEntitlementsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListEntitlementsResponse() + pb_resp = oracledatabase.ListEntitlementsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entitlements(resp) + return resp + + class _ListGiVersions(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListGiVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListGiVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListGiVersionsResponse: + r"""Call the list gi versions method over HTTP. + + Args: + request (~.oracledatabase.ListGiVersionsRequest): + The request object. The request for ``GiVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListGiVersionsResponse: + The response for ``GiVersion.List``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/giVersions", + }, + ] + request, metadata = self._interceptor.pre_list_gi_versions( + request, metadata + ) + pb_request = oracledatabase.ListGiVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListGiVersionsResponse() + pb_resp = oracledatabase.ListGiVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_gi_versions(resp) + return resp + + class _RestoreAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("RestoreAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.RestoreAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Restore``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_autonomous_database( + request, metadata + ) + pb_request = oracledatabase.RestoreAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_autonomous_database(resp) + return resp + + @property + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAutonomousDatabaseWallet(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + autonomous_database.AutonomousDatabase, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + exadata_infra.CloudExadataInfrastructure, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cloud_vm_cluster( + self, + ) -> Callable[[oracledatabase.GetCloudVmClusterRequest], vm_cluster.CloudVmCluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + oracledatabase.ListAutonomousDatabaseBackupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabaseBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabaseCharacterSets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + oracledatabase.ListAutonomousDatabasesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + oracledatabase.ListAutonomousDbVersionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDbVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + oracledatabase.ListCloudExadataInfrastructuresResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCloudExadataInfrastructures(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + oracledatabase.ListCloudVmClustersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCloudVmClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], oracledatabase.ListDbNodesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDbNodes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], oracledatabase.ListDbServersResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDbServers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + oracledatabase.ListDbSystemShapesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDbSystemShapes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entitlements( + self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + oracledatabase.ListEntitlementsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntitlements(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], oracledatabase.ListGiVersionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListGiVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.RestoreAutonomousDatabaseRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(OracleDatabaseRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(OracleDatabaseRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
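+
+            An illustrative poll of a long-running operation, assuming ``transport``
+            is an already constructed transport and ``operation_name`` is a
+            placeholder resource name::
+
+                op = transport.get_operation(
+                    operations_pb2.GetOperationRequest(name=operation_name)
+                )
+                if op.done:
+                    ...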
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(OracleDatabaseRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OracleDatabaseRestTransport",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py new file mode 100644 index 000000000000..e5079e7c48c9 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .autonomous_db_version import AutonomousDbVersion +from .common import CustomerContact +from .db_node import DbNode, DbNodeProperties +from .db_server import DbServer, DbServerProperties +from .db_system_shape import DbSystemShape +from .entitlement import CloudAccountDetails, Entitlement +from .exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .gi_version import GiVersion +from .location_metadata import LocationMetadata +from .oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + 
ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .vm_cluster import CloudVmCluster, CloudVmClusterProperties, DataCollectionOptions + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + "LocationMetadata", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py new file mode 100644 index 000000000000..907ef93bc4d5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py @@ -0,0 +1,1421 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "GenerateType", + "State", + "OperationsInsightsState", + "DBWorkload", + "AutonomousDatabase", + "AutonomousDatabaseProperties", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "DatabaseConnectionStringProfile", + "AllConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseStandbySummary", + "ScheduledOperationDetails", + }, +) + + +class GenerateType(proto.Enum): + r"""The type of wallet generation. + + Values: + GENERATE_TYPE_UNSPECIFIED (0): + Default unspecified value. + ALL (1): + Used to generate wallet for all databases in + the region. + SINGLE (2): + Used to generate wallet for a single + database. + """ + GENERATE_TYPE_UNSPECIFIED = 0 + ALL = 1 + SINGLE = 2 + + +class State(proto.Enum): + r"""The various lifecycle states of the Autonomous Database. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the Autonomous Database is in + provisioning state. + AVAILABLE (2): + Indicates that the Autonomous Database is in + available state. + STOPPING (3): + Indicates that the Autonomous Database is in + stopping state. + STOPPED (4): + Indicates that the Autonomous Database is in + stopped state. + STARTING (5): + Indicates that the Autonomous Database is in + starting state. + TERMINATING (6): + Indicates that the Autonomous Database is in + terminating state. + TERMINATED (7): + Indicates that the Autonomous Database is in + terminated state. + UNAVAILABLE (8): + Indicates that the Autonomous Database is in + unavailable state. + RESTORE_IN_PROGRESS (9): + Indicates that the Autonomous Database + restore is in progress. + RESTORE_FAILED (10): + Indicates that the Autonomous Database failed + to restore. + BACKUP_IN_PROGRESS (11): + Indicates that the Autonomous Database backup + is in progress. + SCALE_IN_PROGRESS (12): + Indicates that the Autonomous Database scale + is in progress. + AVAILABLE_NEEDS_ATTENTION (13): + Indicates that the Autonomous Database is + available but needs attention state. + UPDATING (14): + Indicates that the Autonomous Database is in + updating state. + MAINTENANCE_IN_PROGRESS (15): + Indicates that the Autonomous Database's + maintenance is in progress state. + RESTARTING (16): + Indicates that the Autonomous Database is in + restarting state. + RECREATING (17): + Indicates that the Autonomous Database is in + recreating state. + ROLE_CHANGE_IN_PROGRESS (18): + Indicates that the Autonomous Database's role + change is in progress state. + UPGRADING (19): + Indicates that the Autonomous Database is in + upgrading state. + INACCESSIBLE (20): + Indicates that the Autonomous Database is in + inaccessible state. + STANDBY (21): + Indicates that the Autonomous Database is in + standby state. 
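+
+    A minimal, illustrative check against this enum, assuming ``adb`` is an
+    ``AutonomousDatabase`` message returned by the service::
+
+        if adb.properties.state == State.AVAILABLE:
+            ...  # the database is in its available lifecycle state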
+ """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + STOPPING = 3 + STOPPED = 4 + STARTING = 5 + TERMINATING = 6 + TERMINATED = 7 + UNAVAILABLE = 8 + RESTORE_IN_PROGRESS = 9 + RESTORE_FAILED = 10 + BACKUP_IN_PROGRESS = 11 + SCALE_IN_PROGRESS = 12 + AVAILABLE_NEEDS_ATTENTION = 13 + UPDATING = 14 + MAINTENANCE_IN_PROGRESS = 15 + RESTARTING = 16 + RECREATING = 17 + ROLE_CHANGE_IN_PROGRESS = 18 + UPGRADING = 19 + INACCESSIBLE = 20 + STANDBY = 21 + + +class OperationsInsightsState(proto.Enum): + r"""The state of the Operations Insights for this Autonomous + Database. + + Values: + OPERATIONS_INSIGHTS_STATE_UNSPECIFIED (0): + Default unspecified value. + ENABLING (1): + Enabling status for operation insights. + ENABLED (2): + Enabled status for operation insights. + DISABLING (3): + Disabling status for operation insights. + NOT_ENABLED (4): + Not Enabled status for operation insights. + FAILED_ENABLING (5): + Failed enabling status for operation + insights. + FAILED_DISABLING (6): + Failed disabling status for operation + insights. + """ + OPERATIONS_INSIGHTS_STATE_UNSPECIFIED = 0 + ENABLING = 1 + ENABLED = 2 + DISABLING = 3 + NOT_ENABLED = 4 + FAILED_ENABLING = 5 + FAILED_DISABLING = 6 + + +class DBWorkload(proto.Enum): + r"""The various states available for the Autonomous Database + workload type. + + Values: + DB_WORKLOAD_UNSPECIFIED (0): + Default unspecified value. + OLTP (1): + Autonomous Transaction Processing database. + DW (2): + Autonomous Data Warehouse database. + AJD (3): + Autonomous JSON Database. + APEX (4): + Autonomous Database with the Oracle APEX + Application Development workload type. + """ + DB_WORKLOAD_UNSPECIFIED = 0 + OLTP = 1 + DW = 2 + AJD = 3 + APEX = 4 + + +class AutonomousDatabase(proto.Message): + r"""Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database resource in + the following format: + projects/{project}/locations/{region}/autonomousDatabases/{autonomous_database} + database (str): + Optional. The name of the Autonomous + Database. The database name must be unique in + the project. The name must begin with a letter + and can contain a maximum of 30 alphanumeric + characters. + display_name (str): + Optional. The display name for the Autonomous + Database. The name does not have to be unique + within your project. + entitlement_id (str): + Output only. The ID of the subscription + entitlement associated with the Autonomous + Database. + admin_password (str): + Optional. The password for the default ADMIN + user. + properties (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties): + Optional. The properties of the Autonomous + Database. + labels (MutableMapping[str, str]): + Optional. The labels or tags associated with + the Autonomous Database. + network (str): + Required. The name of the VPC network used by + the Autonomous Database in the following format: + projects/{project}/global/networks/{network} + cidr (str): + Required. The subnet CIDR range for the + Autonmous Database. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time that the + Autonomous Database was created. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + entitlement_id: str = proto.Field( + proto.STRING, + number=5, + ) + admin_password: str = proto.Field( + proto.STRING, + number=6, + ) + properties: "AutonomousDatabaseProperties" = proto.Field( + proto.MESSAGE, + number=7, + message="AutonomousDatabaseProperties", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + network: str = proto.Field( + proto.STRING, + number=9, + ) + cidr: str = proto.Field( + proto.STRING, + number=10, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + + +class AutonomousDatabaseProperties(proto.Message): + r"""The properties of an Autonomous Database. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ocid (str): + Output only. OCID of the Autonomous Database. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + compute_count (float): + Optional. The number of compute servers for + the Autonomous Database. + cpu_core_count (int): + Optional. The number of CPU cores to be made + available to the database. + data_storage_size_tb (int): + Optional. The size of the data stored in the + database, in terabytes. + data_storage_size_gb (int): + Optional. The size of the data stored in the + database, in gigabytes. + db_workload (google.cloud.oracledatabase_v1.types.DBWorkload): + Required. The workload type of the Autonomous + Database. + db_edition (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DatabaseEdition): + Optional. The edition of the Autonomous + Databases. + character_set (str): + Optional. The character set for the + Autonomous Database. The default is AL32UTF8. + n_character_set (str): + Optional. The national character set for the + Autonomous Database. The default is AL16UTF16. + private_endpoint_ip (str): + Optional. The private endpoint IP address for + the Autonomous Database. + private_endpoint_label (str): + Optional. The private endpoint label for the + Autonomous Database. + db_version (str): + Optional. The Oracle Database version for the + Autonomous Database. + is_auto_scaling_enabled (bool): + Optional. This field indicates if auto + scaling is enabled for the Autonomous Database + CPU core count. + is_storage_auto_scaling_enabled (bool): + Optional. This field indicates if auto + scaling is enabled for the Autonomous Database + storage. + license_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.LicenseType): + Required. The license type used for the + Autonomous Database. + customer_contacts (MutableSequence[google.cloud.oracledatabase_v1.types.CustomerContact]): + Optional. The list of customer contacts. + secret_id (str): + Optional. The ID of the Oracle Cloud + Infrastructure vault secret. + vault_id (str): + Optional. The ID of the Oracle Cloud + Infrastructure vault. + maintenance_schedule_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.MaintenanceScheduleType): + Optional. The maintenance schedule of the + Autonomous Database. + mtls_connection_required (bool): + Optional. This field specifies if the + Autonomous Database requires mTLS connections. + backup_retention_period_days (int): + Optional. 
The retention period for the
+            Autonomous Database. This field is specified in
+            days, can range from 1 day to 60 days, and has a
+            default value of 60 days.
+        actual_used_data_storage_size_tb (float):
+            Output only. The amount of storage currently
+            being used for user and system data, in
+            terabytes.
+        allocated_storage_size_tb (float):
+            Output only. The amount of storage currently
+            allocated for the database tables and billed
+            for, rounded up in terabytes.
+        apex_details (google.cloud.oracledatabase_v1.types.AutonomousDatabaseApex):
+            Output only. The details for the Oracle APEX
+            Application Development.
+        are_primary_allowlisted_ips_used (bool):
+            Output only. This field indicates the status
+            of Data Guard and Access control for the
+            Autonomous Database. The field's value is null
+            if Data Guard is disabled or Access Control is
+            disabled. The field's value is TRUE if both Data
+            Guard and Access Control are enabled, and the
+            Autonomous Database is using primary IP access
+            control list (ACL) for standby. The field's
+            value is FALSE if both Data Guard and Access
+            Control are enabled, and the Autonomous Database
+            is using a different IP access control list
+            (ACL) for standby compared to primary.
+
+            This field is a member of `oneof`_ ``_are_primary_allowlisted_ips_used``.
+        lifecycle_details (str):
+            Output only. The details of the current
+            lifecycle state of the Autonomous Database.
+        state (google.cloud.oracledatabase_v1.types.State):
+            Output only. The current lifecycle state of
+            the Autonomous Database.
+        autonomous_container_database_id (str):
+            Output only. The Autonomous Container
+            Database OCID.
+        available_upgrade_versions (MutableSequence[str]):
+            Output only. The list of available Oracle
+            Database upgrade versions for an Autonomous
+            Database.
+        connection_strings (google.cloud.oracledatabase_v1.types.AutonomousDatabaseConnectionStrings):
+            Output only. The connection strings used to
+            connect to an Autonomous Database.
+        connection_urls (google.cloud.oracledatabase_v1.types.AutonomousDatabaseConnectionUrls):
+            Output only. The Oracle Connection URLs for
+            an Autonomous Database.
+        failed_data_recovery_duration (google.protobuf.duration_pb2.Duration):
+            Output only. This field indicates the number
+            of seconds of data loss during a Data Guard
+            failover.
+        memory_table_gbs (int):
+            Output only. The memory assigned to in-memory
+            tables in an Autonomous Database.
+        is_local_data_guard_enabled (bool):
+            Output only. This field indicates whether the
+            Autonomous Database has local (in-region) Data
+            Guard enabled.
+        local_adg_auto_failover_max_data_loss_limit (int):
+            Output only. This field indicates the maximum
+            data loss limit for an Autonomous Database, in
+            seconds.
+        local_standby_db (google.cloud.oracledatabase_v1.types.AutonomousDatabaseStandbySummary):
+            Output only. The details of the Autonomous
+            Data Guard standby database.
+        memory_per_oracle_compute_unit_gbs (int):
+            Output only. The amount of memory enabled per
+            ECPU, in gigabytes.
+        local_disaster_recovery_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.LocalDisasterRecoveryType):
+            Output only. This field indicates the local
+            disaster recovery (DR) type of an Autonomous
+            Database.
+        data_safe_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DataSafeState):
+            Output only. The current state of the Data
+            Safe registration for the Autonomous Database.
+ database_management_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DatabaseManagementState): + Output only. The current state of database + management for the Autonomous Database. + open_mode (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.OpenMode): + Output only. This field indicates the current + mode of the Autonomous Database. + operations_insights_state (google.cloud.oracledatabase_v1.types.OperationsInsightsState): + Output only. This field indicates the state + of Operations Insights for the Autonomous + Database. + peer_db_ids (MutableSequence[str]): + Output only. The list of OCIDs of standby + databases located in Autonomous Data Guard + remote regions that are associated with the + source database. + permission_level (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.PermissionLevel): + Output only. The permission level of the + Autonomous Database. + private_endpoint (str): + Output only. The private endpoint for the + Autonomous Database. + refreshable_mode (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.RefreshableMode): + Output only. The refresh mode of the cloned + Autonomous Database. + refreshable_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.RefreshableState): + Output only. The refresh State of the clone. + role (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.Role): + Output only. The Data Guard role of the + Autonomous Database. + scheduled_operation_details (MutableSequence[google.cloud.oracledatabase_v1.types.ScheduledOperationDetails]): + Output only. The list and details of the + scheduled operations of the Autonomous Database. + sql_web_developer_url (str): + Output only. The SQL Web Developer URL for + the Autonomous Database. + supported_clone_regions (MutableSequence[str]): + Output only. The list of available regions + that can be used to create a clone for the + Autonomous Database. + used_data_storage_size_tbs (int): + Output only. The storage space used by + Autonomous Database, in gigabytes. + oci_url (str): + Output only. The Oracle Cloud Infrastructure + link for the Autonomous Database. + total_auto_backup_storage_size_gbs (float): + Output only. The storage space used by + automatic backups of Autonomous Database, in + gigabytes. + next_long_term_backup_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The long term backup schedule of + the Autonomous Database. + maintenance_begin_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time when + maintenance will begin. + maintenance_end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time when + maintenance will end. + """ + + class DatabaseEdition(proto.Enum): + r"""The editions available for the Autonomous Database. + + Values: + DATABASE_EDITION_UNSPECIFIED (0): + Default unspecified value. + STANDARD_EDITION (1): + Standard Database Edition + ENTERPRISE_EDITION (2): + Enterprise Database Edition + """ + DATABASE_EDITION_UNSPECIFIED = 0 + STANDARD_EDITION = 1 + ENTERPRISE_EDITION = 2 + + class LicenseType(proto.Enum): + r"""The license types available for the Autonomous Database. 
+ + Values: + LICENSE_TYPE_UNSPECIFIED (0): + Unspecified + LICENSE_INCLUDED (1): + License included part of offer + BRING_YOUR_OWN_LICENSE (2): + Bring your own license + """ + LICENSE_TYPE_UNSPECIFIED = 0 + LICENSE_INCLUDED = 1 + BRING_YOUR_OWN_LICENSE = 2 + + class MaintenanceScheduleType(proto.Enum): + r"""The available maintenance schedules for the Autonomous + Database. + + Values: + MAINTENANCE_SCHEDULE_TYPE_UNSPECIFIED (0): + Default unspecified value. + EARLY (1): + An EARLY maintenance schedule patches the + database before the regular scheduled + maintenance. + REGULAR (2): + A REGULAR maintenance schedule follows the + normal maintenance cycle. + """ + MAINTENANCE_SCHEDULE_TYPE_UNSPECIFIED = 0 + EARLY = 1 + REGULAR = 2 + + class LocalDisasterRecoveryType(proto.Enum): + r"""The types of local disaster recovery available for an + Autonomous Database. + + Values: + LOCAL_DISASTER_RECOVERY_TYPE_UNSPECIFIED (0): + Default unspecified value. + ADG (1): + Autonomous Data Guard recovery. + BACKUP_BASED (2): + Backup based recovery. + """ + LOCAL_DISASTER_RECOVERY_TYPE_UNSPECIFIED = 0 + ADG = 1 + BACKUP_BASED = 2 + + class DataSafeState(proto.Enum): + r"""Varies states of the Data Safe registration for the + Autonomous Database. + + Values: + DATA_SAFE_STATE_UNSPECIFIED (0): + Default unspecified value. + REGISTERING (1): + Registering data safe state. + REGISTERED (2): + Registered data safe state. + DEREGISTERING (3): + Deregistering data safe state. + NOT_REGISTERED (4): + Not registered data safe state. + FAILED (5): + Failed data safe state. + """ + DATA_SAFE_STATE_UNSPECIFIED = 0 + REGISTERING = 1 + REGISTERED = 2 + DEREGISTERING = 3 + NOT_REGISTERED = 4 + FAILED = 5 + + class DatabaseManagementState(proto.Enum): + r"""The different states of database management for an Autonomous + Database. + + Values: + DATABASE_MANAGEMENT_STATE_UNSPECIFIED (0): + Default unspecified value. + ENABLING (1): + Enabling Database Management state + ENABLED (2): + Enabled Database Management state + DISABLING (3): + Disabling Database Management state + NOT_ENABLED (4): + Not Enabled Database Management state + FAILED_ENABLING (5): + Failed enabling Database Management state + FAILED_DISABLING (6): + Failed disabling Database Management state + """ + DATABASE_MANAGEMENT_STATE_UNSPECIFIED = 0 + ENABLING = 1 + ENABLED = 2 + DISABLING = 3 + NOT_ENABLED = 4 + FAILED_ENABLING = 5 + FAILED_DISABLING = 6 + + class OpenMode(proto.Enum): + r"""This field indicates the modes of an Autonomous Database. + + Values: + OPEN_MODE_UNSPECIFIED (0): + Default unspecified value. + READ_ONLY (1): + Read Only Mode + READ_WRITE (2): + Read Write Mode + """ + OPEN_MODE_UNSPECIFIED = 0 + READ_ONLY = 1 + READ_WRITE = 2 + + class PermissionLevel(proto.Enum): + r"""The types of permission levels for an Autonomous Database. + + Values: + PERMISSION_LEVEL_UNSPECIFIED (0): + Default unspecified value. + RESTRICTED (1): + Restricted mode allows access only by admin + users. + UNRESTRICTED (2): + Normal access. + """ + PERMISSION_LEVEL_UNSPECIFIED = 0 + RESTRICTED = 1 + UNRESTRICTED = 2 + + class RefreshableMode(proto.Enum): + r"""The refresh mode of the cloned Autonomous Database. + + Values: + REFRESHABLE_MODE_UNSPECIFIED (0): + The default unspecified value. + AUTOMATIC (1): + AUTOMATIC indicates that the cloned database + is automatically refreshed with data from the + source Autonomous Database. + MANUAL (2): + MANUAL indicates that the cloned database is + manually refreshed with data from the source + Autonomous Database. 
+ """ + REFRESHABLE_MODE_UNSPECIFIED = 0 + AUTOMATIC = 1 + MANUAL = 2 + + class RefreshableState(proto.Enum): + r"""The refresh state of the cloned Autonomous Database. + + Values: + REFRESHABLE_STATE_UNSPECIFIED (0): + Default unspecified value. + REFRESHING (1): + Refreshing + NOT_REFRESHING (2): + Not refreshed + """ + REFRESHABLE_STATE_UNSPECIFIED = 0 + REFRESHING = 1 + NOT_REFRESHING = 2 + + class Role(proto.Enum): + r"""The Data Guard role of the Autonomous Database. + + Values: + ROLE_UNSPECIFIED (0): + Default unspecified value. + PRIMARY (1): + Primary role + STANDBY (2): + Standby role + DISABLED_STANDBY (3): + Disabled standby role + BACKUP_COPY (4): + Backup copy role + SNAPSHOT_STANDBY (5): + Snapshot standby role + """ + ROLE_UNSPECIFIED = 0 + PRIMARY = 1 + STANDBY = 2 + DISABLED_STANDBY = 3 + BACKUP_COPY = 4 + SNAPSHOT_STANDBY = 5 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + compute_count: float = proto.Field( + proto.FLOAT, + number=2, + ) + cpu_core_count: int = proto.Field( + proto.INT32, + number=3, + ) + data_storage_size_tb: int = proto.Field( + proto.INT32, + number=4, + ) + data_storage_size_gb: int = proto.Field( + proto.INT32, + number=63, + ) + db_workload: "DBWorkload" = proto.Field( + proto.ENUM, + number=5, + enum="DBWorkload", + ) + db_edition: DatabaseEdition = proto.Field( + proto.ENUM, + number=6, + enum=DatabaseEdition, + ) + character_set: str = proto.Field( + proto.STRING, + number=8, + ) + n_character_set: str = proto.Field( + proto.STRING, + number=9, + ) + private_endpoint_ip: str = proto.Field( + proto.STRING, + number=10, + ) + private_endpoint_label: str = proto.Field( + proto.STRING, + number=11, + ) + db_version: str = proto.Field( + proto.STRING, + number=12, + ) + is_auto_scaling_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + is_storage_auto_scaling_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + license_type: LicenseType = proto.Field( + proto.ENUM, + number=16, + enum=LicenseType, + ) + customer_contacts: MutableSequence[common.CustomerContact] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message=common.CustomerContact, + ) + secret_id: str = proto.Field( + proto.STRING, + number=18, + ) + vault_id: str = proto.Field( + proto.STRING, + number=19, + ) + maintenance_schedule_type: MaintenanceScheduleType = proto.Field( + proto.ENUM, + number=20, + enum=MaintenanceScheduleType, + ) + mtls_connection_required: bool = proto.Field( + proto.BOOL, + number=34, + ) + backup_retention_period_days: int = proto.Field( + proto.INT32, + number=57, + ) + actual_used_data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=21, + ) + allocated_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=22, + ) + apex_details: "AutonomousDatabaseApex" = proto.Field( + proto.MESSAGE, + number=23, + message="AutonomousDatabaseApex", + ) + are_primary_allowlisted_ips_used: bool = proto.Field( + proto.BOOL, + number=24, + optional=True, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=25, + ) + state: "State" = proto.Field( + proto.ENUM, + number=26, + enum="State", + ) + autonomous_container_database_id: str = proto.Field( + proto.STRING, + number=27, + ) + available_upgrade_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + connection_strings: "AutonomousDatabaseConnectionStrings" = proto.Field( + proto.MESSAGE, + number=29, + message="AutonomousDatabaseConnectionStrings", + ) + connection_urls: "AutonomousDatabaseConnectionUrls" = 
proto.Field( + proto.MESSAGE, + number=30, + message="AutonomousDatabaseConnectionUrls", + ) + failed_data_recovery_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=31, + message=duration_pb2.Duration, + ) + memory_table_gbs: int = proto.Field( + proto.INT32, + number=32, + ) + is_local_data_guard_enabled: bool = proto.Field( + proto.BOOL, + number=33, + ) + local_adg_auto_failover_max_data_loss_limit: int = proto.Field( + proto.INT32, + number=35, + ) + local_standby_db: "AutonomousDatabaseStandbySummary" = proto.Field( + proto.MESSAGE, + number=36, + message="AutonomousDatabaseStandbySummary", + ) + memory_per_oracle_compute_unit_gbs: int = proto.Field( + proto.INT32, + number=37, + ) + local_disaster_recovery_type: LocalDisasterRecoveryType = proto.Field( + proto.ENUM, + number=38, + enum=LocalDisasterRecoveryType, + ) + data_safe_state: DataSafeState = proto.Field( + proto.ENUM, + number=39, + enum=DataSafeState, + ) + database_management_state: DatabaseManagementState = proto.Field( + proto.ENUM, + number=40, + enum=DatabaseManagementState, + ) + open_mode: OpenMode = proto.Field( + proto.ENUM, + number=41, + enum=OpenMode, + ) + operations_insights_state: "OperationsInsightsState" = proto.Field( + proto.ENUM, + number=42, + enum="OperationsInsightsState", + ) + peer_db_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=43, + ) + permission_level: PermissionLevel = proto.Field( + proto.ENUM, + number=44, + enum=PermissionLevel, + ) + private_endpoint: str = proto.Field( + proto.STRING, + number=45, + ) + refreshable_mode: RefreshableMode = proto.Field( + proto.ENUM, + number=46, + enum=RefreshableMode, + ) + refreshable_state: RefreshableState = proto.Field( + proto.ENUM, + number=47, + enum=RefreshableState, + ) + role: Role = proto.Field( + proto.ENUM, + number=48, + enum=Role, + ) + scheduled_operation_details: MutableSequence[ + "ScheduledOperationDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=64, + message="ScheduledOperationDetails", + ) + sql_web_developer_url: str = proto.Field( + proto.STRING, + number=50, + ) + supported_clone_regions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=51, + ) + used_data_storage_size_tbs: int = proto.Field( + proto.INT32, + number=53, + ) + oci_url: str = proto.Field( + proto.STRING, + number=54, + ) + total_auto_backup_storage_size_gbs: float = proto.Field( + proto.FLOAT, + number=59, + ) + next_long_term_backup_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=60, + message=timestamp_pb2.Timestamp, + ) + maintenance_begin_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=65, + message=timestamp_pb2.Timestamp, + ) + maintenance_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=66, + message=timestamp_pb2.Timestamp, + ) + + +class AutonomousDatabaseApex(proto.Message): + r"""Oracle APEX Application Development. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseApex + + Attributes: + apex_version (str): + Output only. The Oracle APEX Application + Development version. + ords_version (str): + Output only. The Oracle REST Data Services + (ORDS) version. + """ + + apex_version: str = proto.Field( + proto.STRING, + number=1, + ) + ords_version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AutonomousDatabaseConnectionStrings(proto.Message): + r"""The connection string used to connect to the Autonomous + Database. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseConnectionStrings + + Attributes: + all_connection_strings (google.cloud.oracledatabase_v1.types.AllConnectionStrings): + Output only. Returns all connection strings + that can be used to connect to the Autonomous + Database. + dedicated (str): + Output only. The database service provides + the least level of resources to each SQL + statement, but supports the most number of + concurrent SQL statements. + high (str): + Output only. The database service provides + the highest level of resources to each SQL + statement. + low (str): + Output only. The database service provides + the least level of resources to each SQL + statement. + medium (str): + Output only. The database service provides a + lower level of resources to each SQL statement. + profiles (MutableSequence[google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile]): + Output only. A list of connection string + profiles to allow clients to group, filter, and + select values based on the structured metadata. + """ + + all_connection_strings: "AllConnectionStrings" = proto.Field( + proto.MESSAGE, + number=1, + message="AllConnectionStrings", + ) + dedicated: str = proto.Field( + proto.STRING, + number=2, + ) + high: str = proto.Field( + proto.STRING, + number=3, + ) + low: str = proto.Field( + proto.STRING, + number=4, + ) + medium: str = proto.Field( + proto.STRING, + number=5, + ) + profiles: MutableSequence["DatabaseConnectionStringProfile"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="DatabaseConnectionStringProfile", + ) + + +class DatabaseConnectionStringProfile(proto.Message): + r"""The connection string profile to allow clients to group. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/DatabaseConnectionStringProfile + + Attributes: + consumer_group (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.ConsumerGroup): + Output only. The current consumer group being + used by the connection. + display_name (str): + Output only. The display name for the + database connection. + host_format (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.HostFormat): + Output only. The host name format being + currently used in connection string. + is_regional (bool): + Output only. This field indicates if the + connection string is regional and is only + applicable for cross-region Data Guard. + protocol (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.Protocol): + Output only. The protocol being used by the + connection. + session_mode (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.SessionMode): + Output only. The current session mode of the + connection. + syntax_format (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.SyntaxFormat): + Output only. The syntax of the connection + string. + tls_authentication (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.TLSAuthentication): + Output only. This field indicates the TLS + authentication type of the connection. + value (str): + Output only. The value of the connection + string. + """ + + class ConsumerGroup(proto.Enum): + r"""The various consumer groups available in the connection + string profile. + + Values: + CONSUMER_GROUP_UNSPECIFIED (0): + Default unspecified value. + HIGH (1): + High consumer group. + MEDIUM (2): + Medium consumer group. + LOW (3): + Low consumer group. + TP (4): + TP consumer group. 
+ TPURGENT (5): + TPURGENT consumer group. + """ + CONSUMER_GROUP_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM = 2 + LOW = 3 + TP = 4 + TPURGENT = 5 + + class HostFormat(proto.Enum): + r"""The host name format being used in the connection string. + + Values: + HOST_FORMAT_UNSPECIFIED (0): + Default unspecified value. + FQDN (1): + FQDN + IP (2): + IP + """ + HOST_FORMAT_UNSPECIFIED = 0 + FQDN = 1 + IP = 2 + + class Protocol(proto.Enum): + r"""The protocol being used by the connection. + + Values: + PROTOCOL_UNSPECIFIED (0): + Default unspecified value. + TCP (1): + Tcp + TCPS (2): + Tcps + """ + PROTOCOL_UNSPECIFIED = 0 + TCP = 1 + TCPS = 2 + + class SessionMode(proto.Enum): + r"""The session mode of the connection. + + Values: + SESSION_MODE_UNSPECIFIED (0): + Default unspecified value. + DIRECT (1): + Direct + INDIRECT (2): + Indirect + """ + SESSION_MODE_UNSPECIFIED = 0 + DIRECT = 1 + INDIRECT = 2 + + class SyntaxFormat(proto.Enum): + r"""Specifies syntax of the connection string. + + Values: + SYNTAX_FORMAT_UNSPECIFIED (0): + Default unspecified value. + LONG (1): + Long + EZCONNECT (2): + Ezconnect + EZCONNECTPLUS (3): + Ezconnectplus + """ + SYNTAX_FORMAT_UNSPECIFIED = 0 + LONG = 1 + EZCONNECT = 2 + EZCONNECTPLUS = 3 + + class TLSAuthentication(proto.Enum): + r"""This field indicates the TLS authentication type of the + connection. + + Values: + TLS_AUTHENTICATION_UNSPECIFIED (0): + Default unspecified value. + SERVER (1): + Server + MUTUAL (2): + Mutual + """ + TLS_AUTHENTICATION_UNSPECIFIED = 0 + SERVER = 1 + MUTUAL = 2 + + consumer_group: ConsumerGroup = proto.Field( + proto.ENUM, + number=1, + enum=ConsumerGroup, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + host_format: HostFormat = proto.Field( + proto.ENUM, + number=3, + enum=HostFormat, + ) + is_regional: bool = proto.Field( + proto.BOOL, + number=4, + ) + protocol: Protocol = proto.Field( + proto.ENUM, + number=5, + enum=Protocol, + ) + session_mode: SessionMode = proto.Field( + proto.ENUM, + number=6, + enum=SessionMode, + ) + syntax_format: SyntaxFormat = proto.Field( + proto.ENUM, + number=7, + enum=SyntaxFormat, + ) + tls_authentication: TLSAuthentication = proto.Field( + proto.ENUM, + number=8, + enum=TLSAuthentication, + ) + value: str = proto.Field( + proto.STRING, + number=9, + ) + + +class AllConnectionStrings(proto.Message): + r"""A list of all connection strings that can be used to connect + to the Autonomous Database. + + Attributes: + high (str): + Output only. The database service provides + the highest level of resources to each SQL + statement. + low (str): + Output only. The database service provides + the least level of resources to each SQL + statement. + medium (str): + Output only. The database service provides a + lower level of resources to each SQL statement. + """ + + high: str = proto.Field( + proto.STRING, + number=1, + ) + low: str = proto.Field( + proto.STRING, + number=2, + ) + medium: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AutonomousDatabaseConnectionUrls(proto.Message): + r"""The URLs for accessing Oracle Application Express (APEX) and + SQL Developer Web with a browser from a Compute instance. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseConnectionUrls + + Attributes: + apex_uri (str): + Output only. Oracle Application Express + (APEX) URL. + database_transforms_uri (str): + Output only. The URL of the Database + Transforms for the Autonomous Database. + graph_studio_uri (str): + Output only. 
The URL of the Graph Studio for + the Autonomous Database. + machine_learning_notebook_uri (str): + Output only. The URL of the Oracle Machine + Learning (OML) Notebook for the Autonomous + Database. + machine_learning_user_management_uri (str): + Output only. The URL of Machine Learning user + management the Autonomous Database. + mongo_db_uri (str): + Output only. The URL of the MongoDB API for + the Autonomous Database. + ords_uri (str): + Output only. The Oracle REST Data Services + (ORDS) URL of the Web Access for the Autonomous + Database. + sql_dev_web_uri (str): + Output only. The URL of the Oracle SQL + Developer Web for the Autonomous Database. + """ + + apex_uri: str = proto.Field( + proto.STRING, + number=1, + ) + database_transforms_uri: str = proto.Field( + proto.STRING, + number=2, + ) + graph_studio_uri: str = proto.Field( + proto.STRING, + number=3, + ) + machine_learning_notebook_uri: str = proto.Field( + proto.STRING, + number=4, + ) + machine_learning_user_management_uri: str = proto.Field( + proto.STRING, + number=5, + ) + mongo_db_uri: str = proto.Field( + proto.STRING, + number=6, + ) + ords_uri: str = proto.Field( + proto.STRING, + number=7, + ) + sql_dev_web_uri: str = proto.Field( + proto.STRING, + number=8, + ) + + +class AutonomousDatabaseStandbySummary(proto.Message): + r"""Autonomous Data Guard standby database details. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseStandbySummary + + Attributes: + lag_time_duration (google.protobuf.duration_pb2.Duration): + Output only. The amount of time, in seconds, + that the data of the standby database lags in + comparison to the data of the primary database. + lifecycle_details (str): + Output only. The additional details about the + current lifecycle state of the Autonomous + Database. + state (google.cloud.oracledatabase_v1.types.State): + Output only. The current lifecycle state of + the Autonomous Database. + data_guard_role_changed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the Autonomous + Data Guard role was switched for the standby + Autonomous Database. + disaster_recovery_role_changed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the Disaster + Recovery role was switched for the standby + Autonomous Database. + """ + + lag_time_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=2, + ) + state: "State" = proto.Field( + proto.ENUM, + number=3, + enum="State", + ) + data_guard_role_changed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + disaster_recovery_role_changed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class ScheduledOperationDetails(proto.Message): + r"""Details of scheduled operation. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/ScheduledOperationDetails + + Attributes: + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Output only. Day of week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Output only. Auto start time. + stop_time (google.type.timeofday_pb2.TimeOfDay): + Output only. Auto stop time. 
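+
+    An illustrative read of these values, assuming ``adb`` is an
+    ``AutonomousDatabase`` message with scheduled operations configured::
+
+        for op in adb.properties.scheduled_operation_details:
+            print(op.day_of_week, op.start_time, op.stop_time)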
+ """ + + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + stop_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=5, + message=timeofday_pb2.TimeOfDay, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py new file mode 100644 index 000000000000..dd6bfd509fce --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "AutonomousDatabaseCharacterSet", + }, +) + + +class AutonomousDatabaseCharacterSet(proto.Message): + r"""Details of the Autonomous Database character set resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabaseCharacterSets/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Character + Set resource in the following format: + projects/{project}/locations/{region}/autonomousDatabaseCharacterSets/{autonomous_database_character_set} + character_set_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseCharacterSet.CharacterSetType): + Output only. The character set type for the + Autonomous Database. + character_set (str): + Output only. The character set name for the + Autonomous Database which is the ID in the + resource name. + """ + + class CharacterSetType(proto.Enum): + r"""The type of character set an Autonomous Database can have. + + Values: + CHARACTER_SET_TYPE_UNSPECIFIED (0): + Character set type is not specified. + DATABASE (1): + Character set type is set to database. + NATIONAL (2): + Character set type is set to national. 
+ """ + CHARACTER_SET_TYPE_UNSPECIFIED = 0 + DATABASE = 1 + NATIONAL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + character_set_type: CharacterSetType = proto.Field( + proto.ENUM, + number=2, + enum=CharacterSetType, + ) + character_set: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py new file mode 100644 index 000000000000..1f15eb50e02a --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + }, +) + + +class AutonomousDatabaseBackup(proto.Message): + r"""Details of the Autonomous Database Backup resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabaseBackup/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Backup + resource with the format: + projects/{project}/locations/{region}/autonomousDatabaseBackups/{autonomous_database_backup} + autonomous_database (str): + Required. The name of the Autonomous Database resource for + which the backup is being created. Format: + projects/{project}/locations/{region}/autonomousDatabases/{autonomous_database} + display_name (str): + Optional. User friendly name for the Backup. + The name does not have to be unique. + properties (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties): + Optional. Various properties of the backup. + labels (MutableMapping[str, str]): + Optional. labels or tags associated with the + resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + autonomous_database: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + properties: "AutonomousDatabaseBackupProperties" = proto.Field( + proto.MESSAGE, + number=4, + message="AutonomousDatabaseBackupProperties", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class AutonomousDatabaseBackupProperties(proto.Message): + r"""Properties of the Autonomous Database Backup resource. + + Attributes: + ocid (str): + Output only. OCID of the Autonomous Database + backup. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + retention_period_days (int): + Optional. Retention period in days for the + backup. 
+ compartment_id (str): + Output only. The OCID of the compartment. + database_size_tb (float): + Output only. The quantity of data in the + database, in terabytes. + db_version (str): + Output only. A valid Oracle Database version + for Autonomous Database. + is_long_term_backup (bool): + Output only. Indicates if the backup is long + term backup. + is_automatic_backup (bool): + Output only. Indicates if the backup is + automatic or user initiated. + is_restorable (bool): + Output only. Indicates if the backup can be + used to restore the Autonomous Database. + key_store_id (str): + Optional. The OCID of the key store of Oracle + Vault. + key_store_wallet (str): + Optional. The wallet name for Oracle Key + Vault. + kms_key_id (str): + Optional. The OCID of the key container that + is used as the master encryption key in database + transparent data encryption (TDE) operations. + kms_key_version_id (str): + Optional. The OCID of the key container + version that is used in database transparent + data encryption (TDE) operations KMS Key can + have multiple key versions. If none is + specified, the current key version (latest) of + the Key Id is used for the operation. Autonomous + Database Serverless does not use key versions, + hence is not applicable for Autonomous Database + Serverless instances. + lifecycle_details (str): + Output only. Additional information about the + current lifecycle state. + lifecycle_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties.State): + Output only. The lifecycle state of the + backup. + size_tb (float): + Output only. The backup size in terabytes. + available_till_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp until when the backup + will be available. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the backup + completed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the backup + started. + type_ (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties.Type): + Output only. The type of the backup. + vault_id (str): + Optional. The OCID of the vault. + """ + + class State(proto.Enum): + r"""// The various lifecycle states of the Autonomous Database + Backup. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + CREATING (1): + Indicates that the resource is in creating + state. + ACTIVE (2): + Indicates that the resource is in active + state. + DELETING (3): + Indicates that the resource is in deleting + state. + DELETED (4): + Indicates that the resource is in deleted + state. + FAILED (6): + Indicates that the resource is in failed + state. + UPDATING (7): + Indicates that the resource is in updating + state. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + DELETED = 4 + FAILED = 6 + UPDATING = 7 + + class Type(proto.Enum): + r"""The type of the backup. + + Values: + TYPE_UNSPECIFIED (0): + Default unspecified value. + INCREMENTAL (1): + Incremental backups. + FULL (2): + Full backups. + LONG_TERM (3): + Long term backups. 
+ """ + TYPE_UNSPECIFIED = 0 + INCREMENTAL = 1 + FULL = 2 + LONG_TERM = 3 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + retention_period_days: int = proto.Field( + proto.INT32, + number=2, + ) + compartment_id: str = proto.Field( + proto.STRING, + number=3, + ) + database_size_tb: float = proto.Field( + proto.FLOAT, + number=4, + ) + db_version: str = proto.Field( + proto.STRING, + number=5, + ) + is_long_term_backup: bool = proto.Field( + proto.BOOL, + number=6, + ) + is_automatic_backup: bool = proto.Field( + proto.BOOL, + number=7, + ) + is_restorable: bool = proto.Field( + proto.BOOL, + number=8, + ) + key_store_id: str = proto.Field( + proto.STRING, + number=9, + ) + key_store_wallet: str = proto.Field( + proto.STRING, + number=10, + ) + kms_key_id: str = proto.Field( + proto.STRING, + number=11, + ) + kms_key_version_id: str = proto.Field( + proto.STRING, + number=12, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=13, + ) + lifecycle_state: State = proto.Field( + proto.ENUM, + number=14, + enum=State, + ) + size_tb: float = proto.Field( + proto.FLOAT, + number=15, + ) + available_till_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=17, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=19, + enum=Type, + ) + vault_id: str = proto.Field( + proto.STRING, + number=20, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py new file mode 100644 index 000000000000..05189694df98 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import autonomous_database + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "AutonomousDbVersion", + }, +) + + +class AutonomousDbVersion(proto.Message): + r"""Details of the Autonomous Database version. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDbVersionSummary/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Version + resource with the format: + projects/{project}/locations/{region}/autonomousDbVersions/{autonomous_db_version} + version (str): + Output only. An Oracle Database version for + Autonomous Database. 
+ db_workload (google.cloud.oracledatabase_v1.types.DBWorkload): + Output only. The Autonomous Database workload + type. + workload_uri (str): + Output only. A URL that points to a detailed + description of the Autonomous Database version. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + db_workload: autonomous_database.DBWorkload = proto.Field( + proto.ENUM, + number=4, + enum=autonomous_database.DBWorkload, + ) + workload_uri: str = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py new file mode 100644 index 000000000000..2357b454221c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "CustomerContact", + }, +) + + +class CustomerContact(proto.Message): + r"""The CustomerContact reference as defined by Oracle. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/CustomerContact + + Attributes: + email (str): + Required. The email address used by Oracle to + send notifications regarding databases and + infrastructure. + """ + + email: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py new file mode 100644 index 000000000000..4f0a7175908f --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
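# A short sketch of the CustomerContact message defined above; the e-mail
# addresses are placeholders. Lists of these contacts are reused by other
# resources in this package (e.g. Exadata Infrastructure properties).
from google.cloud.oracledatabase_v1.types import common

contacts = [
    common.CustomerContact(email="dba-oncall@example.com"),
    common.CustomerContact(email="platform-team@example.com"),
]
print([c.email for c in contacts])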
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "DbNode", + "DbNodeProperties", + }, +) + + +class DbNode(proto.Message): + r"""Details of the database node resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbNode/ + + Attributes: + name (str): + Identifier. The name of the database node resource in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node} + properties (google.cloud.oracledatabase_v1.types.DbNodeProperties): + Optional. Various properties of the database + node. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + properties: "DbNodeProperties" = proto.Field( + proto.MESSAGE, + number=3, + message="DbNodeProperties", + ) + + +class DbNodeProperties(proto.Message): + r"""Various properties and settings associated with Db node. + + Attributes: + ocid (str): + Output only. OCID of database node. + ocpu_count (int): + Optional. OCPU count per database node. + memory_size_gb (int): + Memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per database node. + db_server_ocid (str): + Optional. Database server OCID. + hostname (str): + Optional. DNS + state (google.cloud.oracledatabase_v1.types.DbNodeProperties.State): + Output only. State of the database node. + total_cpu_core_count (int): + Total CPU core count of the database node. + """ + + class State(proto.Enum): + r"""The various lifecycle states of the database node. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the resource is in + provisioning state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UPDATING (3): + Indicates that the resource is in updating + state. + STOPPING (4): + Indicates that the resource is in stopping + state. + STOPPED (5): + Indicates that the resource is in stopped + state. + STARTING (6): + Indicates that the resource is in starting + state. + TERMINATING (7): + Indicates that the resource is in terminating + state. + TERMINATED (8): + Indicates that the resource is in terminated + state. + FAILED (9): + Indicates that the resource is in failed + state. 
+ """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + STOPPING = 4 + STOPPED = 5 + STARTING = 6 + TERMINATING = 7 + TERMINATED = 8 + FAILED = 9 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + ocpu_count: int = proto.Field( + proto.INT32, + number=2, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=3, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + db_server_ocid: str = proto.Field( + proto.STRING, + number=5, + ) + hostname: str = proto.Field( + proto.STRING, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + total_cpu_core_count: int = proto.Field( + proto.INT32, + number=10, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py new file mode 100644 index 000000000000..ac60975560d4 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "DbServer", + "DbServerProperties", + }, +) + + +class DbServer(proto.Message): + r"""Details of the database server resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbServer/ + + Attributes: + name (str): + Identifier. The name of the database server resource with + the format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server} + display_name (str): + Optional. User friendly name for this + resource. + properties (google.cloud.oracledatabase_v1.types.DbServerProperties): + Optional. Various properties of the database + server. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + properties: "DbServerProperties" = proto.Field( + proto.MESSAGE, + number=3, + message="DbServerProperties", + ) + + +class DbServerProperties(proto.Message): + r"""Various properties and settings associated with Exadata + database server. + + Attributes: + ocid (str): + Output only. OCID of database server. + ocpu_count (int): + Optional. OCPU count per database. + max_ocpu_count (int): + Optional. Maximum OCPU count per database. + memory_size_gb (int): + Optional. Memory allocated in GBs. + max_memory_size_gb (int): + Optional. Maximum memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per VM. + max_db_node_storage_size_gb (int): + Optional. Maximum local storage per VM. + vm_count (int): + Optional. Vm count per database. 
+ state (google.cloud.oracledatabase_v1.types.DbServerProperties.State): + Output only. State of the database server. + db_node_ids (MutableSequence[str]): + Output only. OCID of database nodes + associated with the database server. + """ + + class State(proto.Enum): + r"""The various lifecycle states of the database server. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + CREATING (1): + Indicates that the resource is in creating + state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UNAVAILABLE (3): + Indicates that the resource is in unavailable + state. + DELETING (4): + Indicates that the resource is in deleting + state. + DELETED (5): + Indicates that the resource is in deleted + state. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + AVAILABLE = 2 + UNAVAILABLE = 3 + DELETING = 4 + DELETED = 5 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + ocpu_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_ocpu_count: int = proto.Field( + proto.INT32, + number=3, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + max_memory_size_gb: int = proto.Field( + proto.INT32, + number=5, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=6, + ) + max_db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=7, + ) + vm_count: int = proto.Field( + proto.INT32, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + db_node_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py new file mode 100644 index 000000000000..7429af46b6cc --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "DbSystemShape", + }, +) + + +class DbSystemShape(proto.Message): + r"""Details of the Database System Shapes resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbSystemShapeSummary/ + + Attributes: + name (str): + Identifier. The name of the Database System Shape resource + with the format: + projects/{project}/locations/{region}/dbSystemShapes/{db_system_shape} + shape (str): + Optional. shape + min_node_count (int): + Optional. Minimum number of database servers. + max_node_count (int): + Optional. Maximum number of database servers. + min_storage_count (int): + Optional. Minimum number of storage servers. + max_storage_count (int): + Optional. Maximum number of storage servers. 
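# A minimal sketch of reading DbServerProperties fields defined above, e.g. to
# report unallocated OCPU headroom per database server; the values used to
# build the message are placeholders.
from google.cloud.oracledatabase_v1.types import db_server

server = db_server.DbServer(
    display_name="dbserver-1",
    properties=db_server.DbServerProperties(
        ocpu_count=24,
        max_ocpu_count=48,
        state=db_server.DbServerProperties.State.AVAILABLE,
    ),
)
headroom = server.properties.max_ocpu_count - server.properties.ocpu_count
print(f"{server.display_name}: {headroom} OCPUs unallocated")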
+ available_core_count_per_node (int): + Optional. Number of cores per node. + available_memory_per_node_gb (int): + Optional. Memory per database server node in + gigabytes. + available_data_storage_tb (int): + Optional. Storage per storage server in + terabytes. + min_core_count_per_node (int): + Optional. Minimum core count per node. + min_memory_per_node_gb (int): + Optional. Minimum memory per node in + gigabytes. + min_db_node_storage_per_node_gb (int): + Optional. Minimum node storage per database + server in gigabytes. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + shape: str = proto.Field( + proto.STRING, + number=2, + ) + min_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=4, + ) + min_storage_count: int = proto.Field( + proto.INT32, + number=5, + ) + max_storage_count: int = proto.Field( + proto.INT32, + number=6, + ) + available_core_count_per_node: int = proto.Field( + proto.INT32, + number=7, + ) + available_memory_per_node_gb: int = proto.Field( + proto.INT32, + number=8, + ) + available_data_storage_tb: int = proto.Field( + proto.INT32, + number=9, + ) + min_core_count_per_node: int = proto.Field( + proto.INT32, + number=10, + ) + min_memory_per_node_gb: int = proto.Field( + proto.INT32, + number=11, + ) + min_db_node_storage_per_node_gb: int = proto.Field( + proto.INT32, + number=12, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py new file mode 100644 index 000000000000..01b82a412c0b --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "Entitlement", + "CloudAccountDetails", + }, +) + + +class Entitlement(proto.Message): + r"""Details of the Entitlement resource. + + Attributes: + name (str): + Identifier. The name of the Entitlement + resource with the format: + projects/{project}/locations/{region}/entitlements/{entitlement} + cloud_account_details (google.cloud.oracledatabase_v1.types.CloudAccountDetails): + Details of the OCI Cloud Account. + entitlement_id (str): + Output only. Google Cloud Marketplace order + ID (aka entitlement ID) + state (google.cloud.oracledatabase_v1.types.Entitlement.State): + Output only. Entitlement State. + """ + + class State(proto.Enum): + r"""The various lifecycle states of the subscription. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + ACCOUNT_NOT_LINKED (1): + Account not linked. + ACCOUNT_NOT_ACTIVE (2): + Account is linked but not active. 
+ ACTIVE (3): + Entitlement and Account are active. + """ + STATE_UNSPECIFIED = 0 + ACCOUNT_NOT_LINKED = 1 + ACCOUNT_NOT_ACTIVE = 2 + ACTIVE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_account_details: "CloudAccountDetails" = proto.Field( + proto.MESSAGE, + number=2, + message="CloudAccountDetails", + ) + entitlement_id: str = proto.Field( + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +class CloudAccountDetails(proto.Message): + r"""Details of the OCI Cloud Account. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cloud_account (str): + Output only. OCI account name. + cloud_account_home_region (str): + Output only. OCI account home region. + link_existing_account_uri (str): + Output only. URL to link an existing account. + + This field is a member of `oneof`_ ``_link_existing_account_uri``. + account_creation_uri (str): + Output only. URL to create a new account and + link. + + This field is a member of `oneof`_ ``_account_creation_uri``. + """ + + cloud_account: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_account_home_region: str = proto.Field( + proto.STRING, + number=2, + ) + link_existing_account_uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + account_creation_uri: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py new file mode 100644 index 000000000000..b023bb9f1f34 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py @@ -0,0 +1,468 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + }, +) + + +class CloudExadataInfrastructure(proto.Message): + r"""Represents CloudExadataInfrastructure resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + Attributes: + name (str): + Identifier. The name of the Exadata Infrastructure resource + with the format: + projects/{project}/locations/{region}/cloudExadataInfrastructures/{cloud_exadata_infrastructure} + display_name (str): + Optional. User friendly name for this + resource. 
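# A minimal sketch for the Entitlement/CloudAccountDetails messages defined
# above. link_existing_account_uri and account_creation_uri carry proto3
# presence (optional), so the sketch checks presence before reading them;
# this assumes proto-plus "field in message" presence checks.
from typing import Optional

from google.cloud.oracledatabase_v1.types import entitlement

def onboarding_uri(e: entitlement.Entitlement) -> Optional[str]:
    details = e.cloud_account_details
    if "link_existing_account_uri" in details:
        return details.link_existing_account_uri
    if "account_creation_uri" in details:
        return details.account_creation_uri
    return None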
+ gcp_oracle_zone (str): + Optional. Google Cloud Platform location + where Oracle Exadata is hosted. + entitlement_id (str): + Output only. Entitlement ID of the private + offer against which this infrastructure resource + is provisioned. + properties (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructureProperties): + Optional. Various properties of the infra. + labels (MutableMapping[str, str]): + Optional. Labels or tags associated with the + resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time that the + Exadata Infrastructure was created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + gcp_oracle_zone: str = proto.Field( + proto.STRING, + number=8, + ) + entitlement_id: str = proto.Field( + proto.STRING, + number=4, + ) + properties: "CloudExadataInfrastructureProperties" = proto.Field( + proto.MESSAGE, + number=5, + message="CloudExadataInfrastructureProperties", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + + +class CloudExadataInfrastructureProperties(proto.Message): + r"""Various properties of Exadata Infrastructure. + + Attributes: + ocid (str): + Output only. OCID of created infra. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + compute_count (int): + Optional. The number of compute servers for + the Exadata Infrastructure. + storage_count (int): + Optional. The number of Cloud Exadata storage + servers for the Exadata Infrastructure. + total_storage_size_gb (int): + Optional. The total storage allocated to the + Exadata Infrastructure resource, in gigabytes + (GB). + available_storage_size_gb (int): + Output only. The available storage can be + allocated to the Exadata Infrastructure + resource, in gigabytes (GB). + maintenance_window (google.cloud.oracledatabase_v1.types.MaintenanceWindow): + Optional. Maintenance window for repair. + state (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructureProperties.State): + Output only. The current lifecycle state of + the Exadata Infrastructure. + shape (str): + Required. The shape of the Exadata + Infrastructure. The shape determines the amount + of CPU, storage, and memory resources allocated + to the instance. + oci_url (str): + Output only. Deep link to the OCI console to + view this resource. + cpu_count (int): + Optional. The number of enabled CPU cores. + max_cpu_count (int): + Output only. The total number of CPU cores + available. + memory_size_gb (int): + Optional. The memory allocated in GBs. + max_memory_gb (int): + Output only. The total memory available in + GBs. + db_node_storage_size_gb (int): + Optional. The local node storage allocated in + GBs. + max_db_node_storage_size_gb (int): + Output only. The total local node storage + available in GBs. + data_storage_size_tb (float): + Output only. Size, in terabytes, of the DATA + disk group. + max_data_storage_tb (float): + Output only. The total available DATA disk + group size. + activated_storage_count (int): + Output only. The requested number of + additional storage servers activated for the + Exadata Infrastructure. + additional_storage_count (int): + Output only. The requested number of + additional storage servers for the Exadata + Infrastructure. + db_server_version (str): + Output only. 
The software version of the + database servers (dom0) in the Exadata + Infrastructure. + storage_server_version (str): + Output only. The software version of the + storage servers (cells) in the Exadata + Infrastructure. + next_maintenance_run_id (str): + Output only. The OCID of the next maintenance + run. + next_maintenance_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the next + maintenance run will occur. + next_security_maintenance_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the next security + maintenance run will occur. + customer_contacts (MutableSequence[google.cloud.oracledatabase_v1.types.CustomerContact]): + Optional. The list of customer contacts. + monthly_storage_server_version (str): + Output only. The monthly software version of + the storage servers (cells) in the Exadata + Infrastructure. Example: 20.1.15 + monthly_db_server_version (str): + Output only. The monthly software version of + the database servers (dom0) in the Exadata + Infrastructure. Example: 20.1.15 + """ + + class State(proto.Enum): + r"""The various lifecycle states of the Exadata Infrastructure. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + The Exadata Infrastructure is being + provisioned. + AVAILABLE (2): + The Exadata Infrastructure is available for + use. + UPDATING (3): + The Exadata Infrastructure is being updated. + TERMINATING (4): + The Exadata Infrastructure is being + terminated. + TERMINATED (5): + The Exadata Infrastructure is terminated. + FAILED (6): + The Exadata Infrastructure is in failed + state. + MAINTENANCE_IN_PROGRESS (7): + The Exadata Infrastructure is in maintenance. + """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + TERMINATING = 4 + TERMINATED = 5 + FAILED = 6 + MAINTENANCE_IN_PROGRESS = 7 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + compute_count: int = proto.Field( + proto.INT32, + number=2, + ) + storage_count: int = proto.Field( + proto.INT32, + number=3, + ) + total_storage_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + available_storage_size_gb: int = proto.Field( + proto.INT32, + number=5, + ) + maintenance_window: "MaintenanceWindow" = proto.Field( + proto.MESSAGE, + number=6, + message="MaintenanceWindow", + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + shape: str = proto.Field( + proto.STRING, + number=8, + ) + oci_url: str = proto.Field( + proto.STRING, + number=9, + ) + cpu_count: int = proto.Field( + proto.INT32, + number=10, + ) + max_cpu_count: int = proto.Field( + proto.INT32, + number=11, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=12, + ) + max_memory_gb: int = proto.Field( + proto.INT32, + number=13, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=14, + ) + max_db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=15, + ) + data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=16, + ) + max_data_storage_tb: float = proto.Field( + proto.DOUBLE, + number=17, + ) + activated_storage_count: int = proto.Field( + proto.INT32, + number=18, + ) + additional_storage_count: int = proto.Field( + proto.INT32, + number=19, + ) + db_server_version: str = proto.Field( + proto.STRING, + number=20, + ) + storage_server_version: str = proto.Field( + proto.STRING, + number=21, + ) + next_maintenance_run_id: str = proto.Field( + proto.STRING, + number=22, + ) + next_maintenance_run_time: 
timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=23, + message=timestamp_pb2.Timestamp, + ) + next_security_maintenance_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + customer_contacts: MutableSequence[common.CustomerContact] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message=common.CustomerContact, + ) + monthly_storage_server_version: str = proto.Field( + proto.STRING, + number=26, + ) + monthly_db_server_version: str = proto.Field( + proto.STRING, + number=27, + ) + + +class MaintenanceWindow(proto.Message): + r"""Maintenance window as defined by Oracle. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/MaintenanceWindow + + Attributes: + preference (google.cloud.oracledatabase_v1.types.MaintenanceWindow.MaintenanceWindowPreference): + Optional. The maintenance window scheduling + preference. + months (MutableSequence[google.type.month_pb2.Month]): + Optional. Months during the year when + maintenance should be performed. + weeks_of_month (MutableSequence[int]): + Optional. Weeks during the month when + maintenance should be performed. Weeks start on + the 1st, 8th, 15th, and 22nd days of the month, + and have a duration of 7 days. Weeks start and + end based on calendar dates, not days of the + week. + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Days during the week when + maintenance should be performed. + hours_of_day (MutableSequence[int]): + Optional. The window of hours during the day + when maintenance should be performed. The window + is a 4 hour slot. Valid values are: + 0 - represents time slot 0:00 - 3:59 UTC + 4 - represents time slot 4:00 - 7:59 UTC + 8 - represents time slot 8:00 - 11:59 UTC + 12 - represents time slot 12:00 - 15:59 UTC + 16 - represents time slot 16:00 - 19:59 UTC + 20 - represents time slot 20:00 - 23:59 UTC + lead_time_week (int): + Optional. Lead time window allows user to set + a lead time to prepare for a down time. The lead + time is in weeks and valid value is between 1 to + 4. + patching_mode (google.cloud.oracledatabase_v1.types.MaintenanceWindow.PatchingMode): + Optional. Cloud CloudExadataInfrastructure + node patching method, either "ROLLING" + or "NONROLLING". Default value is ROLLING. + custom_action_timeout_mins (int): + Optional. Determines the amount of time the + system will wait before the start of each + database server patching operation. Custom + action timeout is in minutes and valid value is + between 15 to 120 (inclusive). + is_custom_action_timeout_enabled (bool): + Optional. If true, enables the configuration + of a custom action timeout (waiting period) + between database server patching operations. + """ + + class MaintenanceWindowPreference(proto.Enum): + r"""Maintenance window preference. + + Values: + MAINTENANCE_WINDOW_PREFERENCE_UNSPECIFIED (0): + Default unspecified value. + CUSTOM_PREFERENCE (1): + Custom preference. + NO_PREFERENCE (2): + No preference. + """ + MAINTENANCE_WINDOW_PREFERENCE_UNSPECIFIED = 0 + CUSTOM_PREFERENCE = 1 + NO_PREFERENCE = 2 + + class PatchingMode(proto.Enum): + r"""Patching mode. + + Values: + PATCHING_MODE_UNSPECIFIED (0): + Default unspecified value. + ROLLING (1): + Updates the Cloud Exadata database server + hosts in a rolling fashion. + NON_ROLLING (2): + The non-rolling maintenance method first + updates your storage servers at the same time, + then your database servers at the same time. 
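# A minimal sketch composing the CloudExadataInfrastructure message defined
# above as it might be populated for a create call; the display name, zone,
# shape, counts, and contact address are placeholder values.
from google.cloud.oracledatabase_v1.types import common, exadata_infra

infra = exadata_infra.CloudExadataInfrastructure(
    display_name="exadata-prod",
    gcp_oracle_zone="us-east4-b-r1",
    properties=exadata_infra.CloudExadataInfrastructureProperties(
        shape="Exadata.X9M",
        compute_count=2,
        storage_count=3,
        customer_contacts=[common.CustomerContact(email="dba-oncall@example.com")],
    ),
    labels={"env": "prod"},
)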
+ """ + PATCHING_MODE_UNSPECIFIED = 0 + ROLLING = 1 + NON_ROLLING = 2 + + preference: MaintenanceWindowPreference = proto.Field( + proto.ENUM, + number=1, + enum=MaintenanceWindowPreference, + ) + months: MutableSequence[month_pb2.Month] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=month_pb2.Month, + ) + weeks_of_month: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=3, + ) + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=dayofweek_pb2.DayOfWeek, + ) + hours_of_day: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=5, + ) + lead_time_week: int = proto.Field( + proto.INT32, + number=6, + ) + patching_mode: PatchingMode = proto.Field( + proto.ENUM, + number=7, + enum=PatchingMode, + ) + custom_action_timeout_mins: int = proto.Field( + proto.INT32, + number=8, + ) + is_custom_action_timeout_enabled: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py new file mode 100644 index 000000000000..1ecf83198d06 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "GiVersion", + }, +) + + +class GiVersion(proto.Message): + r"""Details of the Oracle Grid Infrastructure (GI) version + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/GiVersionSummary/ + + Attributes: + name (str): + Identifier. The name of the Oracle Grid Infrastructure (GI) + version resource with the format: + projects/{project}/locations/{region}/giVersions/{gi_versions} + version (str): + Optional. version + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py new file mode 100644 index 000000000000..f81798592e71 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "LocationMetadata", + }, +) + + +class LocationMetadata(proto.Message): + r"""Metadata for a given [Location][google.cloud.location.Location]. + + Attributes: + gcp_oracle_zones (MutableSequence[str]): + Output only. Google Cloud Platform Oracle + zones in a location. + """ + + gcp_oracle_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py new file mode 100644 index 000000000000..796dbe2203f9 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py @@ -0,0 +1,1244 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
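# A minimal sketch of the MaintenanceWindow message defined above: a custom
# quarterly window on Sundays in the 0:00-3:59 UTC slot with rolling patching.
# The chosen months, week, and lead time are illustrative only.
from google.type import dayofweek_pb2, month_pb2

from google.cloud.oracledatabase_v1.types import exadata_infra

window = exadata_infra.MaintenanceWindow(
    preference=exadata_infra.MaintenanceWindow.MaintenanceWindowPreference.CUSTOM_PREFERENCE,
    months=[
        month_pb2.Month.JANUARY,
        month_pb2.Month.APRIL,
        month_pb2.Month.JULY,
        month_pb2.Month.OCTOBER,
    ],
    weeks_of_month=[1],
    days_of_week=[dayofweek_pb2.DayOfWeek.SUNDAY],
    hours_of_day=[0],
    lead_time_week=2,
    patching_mode=exadata_infra.MaintenanceWindow.PatchingMode.ROLLING,
)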
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) + +__protobuf__ = proto.module( + package="google.cloud.oracledatabase.v1", + manifest={ + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "GetCloudExadataInfrastructureRequest", + "CreateCloudExadataInfrastructureRequest", + "DeleteCloudExadataInfrastructureRequest", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "GetCloudVmClusterRequest", + "CreateCloudVmClusterRequest", + "DeleteCloudVmClusterRequest", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "OperationMetadata", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "GetAutonomousDatabaseRequest", + "CreateAutonomousDatabaseRequest", + "DeleteAutonomousDatabaseRequest", + "RestoreAutonomousDatabaseRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + }, +) + + +class ListCloudExadataInfrastructuresRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructures.List``. + + Attributes: + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the following + format: projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Exadata + infrastructures will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListCloudExadataInfrastructuresResponse(proto.Message): + r"""The response for ``CloudExadataInfrastructures.list``. + + Attributes: + cloud_exadata_infrastructures (MutableSequence[google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure]): + The list of Exadata Infrastructures. + next_page_token (str): + A token for fetching next page of response. + """ + + @property + def raw_page(self): + return self + + cloud_exadata_infrastructures: MutableSequence[ + exadata_infra.CloudExadataInfrastructure + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=exadata_infra.CloudExadataInfrastructure, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Get``. + + Attributes: + name (str): + Required. 
The name of the Cloud Exadata Infrastructure in + the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Create``. + + Attributes: + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the following + format: projects/{project}/locations/{location}. + cloud_exadata_infrastructure_id (str): + Required. The ID of the Exadata Infrastructure to create. + This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a maximum + of 63 characters in length. The value must start with a + letter and end with a letter or a number. + cloud_exadata_infrastructure (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure): + Required. Details of the Exadata + Infrastructure instance to create. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_exadata_infrastructure_id: str = proto.Field( + proto.STRING, + number=2, + ) + cloud_exadata_infrastructure: exadata_infra.CloudExadataInfrastructure = ( + proto.Field( + proto.MESSAGE, + number=3, + message=exadata_infra.CloudExadataInfrastructure, + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Delete``. + + Attributes: + name (str): + Required. The name of the Cloud Exadata Infrastructure in + the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, all VM clusters for + this Exadata Infrastructure will be deleted. An + Exadata Infrastructure can only be deleted once + all its VM clusters have been deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListCloudVmClustersRequest(proto.Message): + r"""The request for ``CloudVmCluster.List``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The number of VM clusters to + return. If unspecified, at most 50 VM clusters + will be returned. The maximum value is 1,000. + page_token (str): + Optional. 
A token identifying the page of + results the server returns. + filter (str): + Optional. An expression for filtering the + results of the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListCloudVmClustersResponse(proto.Message): + r"""The response for ``CloudVmCluster.List``. + + Attributes: + cloud_vm_clusters (MutableSequence[google.cloud.oracledatabase_v1.types.CloudVmCluster]): + The list of VM Clusters. + next_page_token (str): + A token to fetch the next page of results. + """ + + @property + def raw_page(self): + return self + + cloud_vm_clusters: MutableSequence[vm_cluster.CloudVmCluster] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vm_cluster.CloudVmCluster, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Get``. + + Attributes: + name (str): + Required. The name of the Cloud VM Cluster in the following + format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Create``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + cloud_vm_cluster_id (str): + Required. The ID of the VM Cluster to create. This value is + restricted to (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and + must be a maximum of 63 characters in length. The value must + start with a letter and end with a letter or a number. + cloud_vm_cluster (google.cloud.oracledatabase_v1.types.CloudVmCluster): + Required. The resource being created + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_vm_cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cloud_vm_cluster: vm_cluster.CloudVmCluster = proto.Field( + proto.MESSAGE, + number=3, + message=vm_cluster.CloudVmCluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Delete``. + + Attributes: + name (str): + Required. The name of the Cloud VM Cluster in the following + format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. 
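# A minimal pagination sketch over the List request/response pair defined
# above. The list_fn callable stands in for whichever generated client method
# serves ListCloudVmClustersRequest (assumed here, not shown in these type
# modules); the loop itself relies only on page_token/next_page_token.
from typing import Callable, Iterator

from google.cloud.oracledatabase_v1.types import oracledatabase, vm_cluster

def iter_vm_clusters(
    list_fn: Callable[
        [oracledatabase.ListCloudVmClustersRequest],
        oracledatabase.ListCloudVmClustersResponse,
    ],
    parent: str,
) -> Iterator[vm_cluster.CloudVmCluster]:
    token = ""
    while True:
        resp = list_fn(
            oracledatabase.ListCloudVmClustersRequest(
                parent=parent, page_size=50, page_token=token
            )
        )
        yield from resp.cloud_vm_clusters
        token = resp.next_page_token
        if not token:
            break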
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, all child resources + for the VM Cluster will be deleted. A VM Cluster + can only be deleted once all its child resources + have been deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListEntitlementsRequest(proto.Message): + r"""The request for ``Entitlement.List``. + + Attributes: + parent (str): + Required. The parent value for the + entitlement in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 + entitlements will be returned. The maximum value + is 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListEntitlementsResponse(proto.Message): + r"""The response for ``Entitlement.List``. + + Attributes: + entitlements (MutableSequence[google.cloud.oracledatabase_v1.types.Entitlement]): + The list of Entitlements + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + entitlements: MutableSequence[entitlement.Entitlement] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entitlement.Entitlement, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbServersRequest(proto.Message): + r"""The request for ``DbServer.List``. + + Attributes: + parent (str): + Required. The parent value for database + server in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloudExadataInfrastructure}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 db + servers will be returned. The maximum value is + 1000; values above 1000 will be reset to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbServersResponse(proto.Message): + r"""The response for ``DbServer.List``. + + Attributes: + db_servers (MutableSequence[google.cloud.oracledatabase_v1.types.DbServer]): + The list of database servers. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + db_servers: MutableSequence[db_server.DbServer] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_server.DbServer, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbNodesRequest(proto.Message): + r"""The request for ``DbNode.List``. + + Attributes: + parent (str): + Required. The parent value for database node + in the following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloudVmCluster}. + page_size (int): + Optional. The maximum number of items to + return. 
If unspecified, at most 50 db nodes will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the node should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbNodesResponse(proto.Message): + r"""The response for ``DbNode.List``. + + Attributes: + db_nodes (MutableSequence[google.cloud.oracledatabase_v1.types.DbNode]): + The list of DB Nodes + next_page_token (str): + A token identifying a page of results the + node should return. + """ + + @property + def raw_page(self): + return self + + db_nodes: MutableSequence[db_node.DbNode] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_node.DbNode, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListGiVersionsRequest(proto.Message): + r"""The request for ``GiVersion.List``. + + Attributes: + parent (str): + Required. The parent value for Grid + Infrastructure Version in the following format: + Format: projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 Oracle + Grid Infrastructure (GI) versions will be + returned. The maximum value is 1000; values + above 1000 will be reset to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListGiVersionsResponse(proto.Message): + r"""The response for ``GiVersion.List``. + + Attributes: + gi_versions (MutableSequence[google.cloud.oracledatabase_v1.types.GiVersion]): + The list of Oracle Grid Infrastructure (GI) + versions. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + gi_versions: MutableSequence[gi_version.GiVersion] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gi_version.GiVersion, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbSystemShapesRequest(proto.Message): + r"""The request for ``DbSystemShape.List``. + + Attributes: + parent (str): + Required. The parent value for Database + System Shapes in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 database + system shapes will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbSystemShapesResponse(proto.Message): + r"""The response for ``DbSystemShape.List``. + + Attributes: + db_system_shapes (MutableSequence[google.cloud.oracledatabase_v1.types.DbSystemShape]): + The list of Database System shapes. + next_page_token (str): + A token identifying a page of results the + server should return. 
+ """ + + @property + def raw_page(self): + return self + + db_system_shapes: MutableSequence[ + db_system_shape.DbSystemShape + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_system_shape.DbSystemShape, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. The status of the operation. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + percent_complete (float): + Output only. An estimated percentage of the + operation that has been completed at a given + moment of time, between 0 and 100. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + percent_complete: float = proto.Field( + proto.DOUBLE, + number=8, + ) + + +class ListAutonomousDatabasesRequest(proto.Message): + r"""The request for ``AutonomousDatabase.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous + Database will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. An expression for filtering the + results of the request. + order_by (str): + Optional. An expression for ordering the + results of the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListAutonomousDatabasesResponse(proto.Message): + r"""The response for ``AutonomousDatabase.List``. + + Attributes: + autonomous_databases (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabase]): + The list of Autonomous Databases. + next_page_token (str): + A token identifying a page of results the + server should return. 
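+
+    A short usage sketch (illustrative only; the parent and filter values
+    are placeholders and default application credentials are assumed)::
+
+        from google.cloud import oracledatabase_v1
+
+        client = oracledatabase_v1.OracleDatabaseClient()
+        request = oracledatabase_v1.ListAutonomousDatabasesRequest(
+            parent="projects/my-project/locations/us-east4",
+            filter='display_name="my-database"',
+            page_size=100,
+        )
+        pager = client.list_autonomous_databases(request=request)
+        # Iterate page by page instead of item by item; each page is one
+        # ListAutonomousDatabasesResponse.
+        for page in pager.pages:
+            for database in page.autonomous_databases:
+                print(database.name)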
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    autonomous_databases: MutableSequence[
+        gco_autonomous_database.AutonomousDatabase
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gco_autonomous_database.AutonomousDatabase,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GetAutonomousDatabaseRequest(proto.Message):
+    r"""The request for ``AutonomousDatabase.Get``.
+
+    Attributes:
+        name (str):
+            Required. The name of the Autonomous Database in the
+            following format:
+            projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreateAutonomousDatabaseRequest(proto.Message):
+    r"""The request for ``AutonomousDatabase.Create``.
+
+    Attributes:
+        parent (str):
+            Required. The name of the parent in the
+            following format:
+            projects/{project}/locations/{location}.
+        autonomous_database_id (str):
+            Required. The ID of the Autonomous Database to create. This
+            value is restricted to ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``
+            and must be a maximum of 63 characters in length. The value
+            must start with a letter and end with a letter or a number.
+        autonomous_database (google.cloud.oracledatabase_v1.types.AutonomousDatabase):
+            Required. The Autonomous Database being
+            created.
+        request_id (str):
+            Optional. An optional ID to identify the
+            request. This value is used to identify
+            duplicate requests. If you make a request with
+            the same request ID and the original request is
+            still in progress or completed, the server
+            ignores the second request. This prevents
+            clients from accidentally creating duplicate
+            commitments.
+
+            The request ID must be a valid UUID with the
+            exception that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    autonomous_database_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    autonomous_database: gco_autonomous_database.AutonomousDatabase = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=gco_autonomous_database.AutonomousDatabase,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class DeleteAutonomousDatabaseRequest(proto.Message):
+    r"""The request for ``AutonomousDatabase.Delete``.
+
+    Attributes:
+        name (str):
+            Required. The name of the resource in the following format:
+            projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}.
+        request_id (str):
+            Optional. An optional ID to identify the
+            request. This value is used to identify
+            duplicate requests. If you make a request with
+            the same request ID and the original request is
+            still in progress or completed, the server
+            ignores the second request. This prevents
+            clients from accidentally creating duplicate
+            commitments.
+
+            The request ID must be a valid UUID with the
+            exception that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class RestoreAutonomousDatabaseRequest(proto.Message):
+    r"""The request for ``AutonomousDatabase.Restore``.
+
+    Attributes:
+        name (str):
+            Required. The name of the Autonomous Database in the
+            following format:
+            projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}.
+        restore_time (google.protobuf.timestamp_pb2.Timestamp):
+            Required.
The time and date to restore the + database to. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + restore_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class GenerateAutonomousDatabaseWalletRequest(proto.Message): + r"""The request for ``AutonomousDatabase.GenerateWallet``. + + Attributes: + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + type_ (google.cloud.oracledatabase_v1.types.GenerateType): + Optional. The type of wallet generation for + the Autonomous Database. The default value is + SINGLE. + is_regional (bool): + Optional. True when requesting regional + connection strings in PDB connect info, + applicable to cross-region Data Guard only. + password (str): + Required. The password used to encrypt the + keys inside the wallet. The password must be a + minimum of 8 characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: gco_autonomous_database.GenerateType = proto.Field( + proto.ENUM, + number=2, + enum=gco_autonomous_database.GenerateType, + ) + is_regional: bool = proto.Field( + proto.BOOL, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GenerateAutonomousDatabaseWalletResponse(proto.Message): + r"""The response for ``AutonomousDatabase.GenerateWallet``. + + Attributes: + archive_content (bytes): + Output only. The base64 encoded wallet files. + """ + + archive_content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class ListAutonomousDbVersionsRequest(proto.Message): + r"""The request for ``AutonomousDbVersion.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous DB + Versions will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAutonomousDbVersionsResponse(proto.Message): + r"""The response for ``AutonomousDbVersion.List``. + + Attributes: + autonomous_db_versions (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDbVersion]): + The list of Autonomous Database versions. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_db_versions: MutableSequence[ + autonomous_db_version.AutonomousDbVersion + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=autonomous_db_version.AutonomousDbVersion, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListAutonomousDatabaseCharacterSetsRequest(proto.Message): + r"""The request for ``AutonomousDatabaseCharacterSet.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. 
If unspecified, at most 50 Autonomous DB
+            Character Sets will be returned. The maximum
+            value is 1000; values above 1000 will be coerced
+            to 1000.
+        page_token (str):
+            Optional. A token identifying a page of
+            results the server should return.
+        filter (str):
+            Optional. An expression for filtering the results of the
+            request. Only the **character_set_type** field is supported
+            in the following format:
+            ``character_set_type="{characterSetType}"``. Accepted values
+            include ``DATABASE`` and ``NATIONAL``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListAutonomousDatabaseCharacterSetsResponse(proto.Message):
+    r"""The response for ``AutonomousDatabaseCharacterSet.List``.
+
+    Attributes:
+        autonomous_database_character_sets (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabaseCharacterSet]):
+            The list of Autonomous Database Character
+            Sets.
+        next_page_token (str):
+            A token identifying a page of results the
+            server should return.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    autonomous_database_character_sets: MutableSequence[
+        autonomous_database_character_set.AutonomousDatabaseCharacterSet
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=autonomous_database_character_set.AutonomousDatabaseCharacterSet,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListAutonomousDatabaseBackupsRequest(proto.Message):
+    r"""The request for ``AutonomousDatabaseBackup.List``.
+
+    Attributes:
+        parent (str):
+            Required. The parent value for
+            ListAutonomousDatabaseBackups in the following
+            format: projects/{project}/locations/{location}.
+        filter (str):
+            Optional. An expression for filtering the results of the
+            request. Only the **autonomous_database_id** field is
+            supported in the following format:
+            ``autonomous_database_id="{autonomous_database_id}"``. The
+            accepted values must be a valid Autonomous Database ID,
+            limited to the naming restrictions of the ID:
+            ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``. The ID must start
+            with a letter, end with a letter or a number, and be a
+            maximum of 63 characters.
+        page_size (int):
+            Optional. The maximum number of items to
+            return. If unspecified, at most 50 Autonomous DB
+            Backups will be returned. The maximum value is
+            1000; values above 1000 will be coerced to 1000.
+        page_token (str):
+            Optional. A token identifying a page of
+            results the server should return.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListAutonomousDatabaseBackupsResponse(proto.Message):
+    r"""The response for ``AutonomousDatabaseBackup.List``.
+
+    Attributes:
+        autonomous_database_backups (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackup]):
+            The list of Autonomous Database Backups.
+        next_page_token (str):
+            A token identifying a page of results the
+            server should return.
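+
+    A filtering sketch (illustrative only; the parent resource and the
+    autonomous_database_id used in the filter are placeholders)::
+
+        from google.cloud import oracledatabase_v1
+
+        client = oracledatabase_v1.OracleDatabaseClient()
+        request = oracledatabase_v1.ListAutonomousDatabaseBackupsRequest(
+            parent="projects/my-project/locations/us-east4",
+            # Restrict the listing to backups of a single database, as
+            # described for the filter field above.
+            filter='autonomous_database_id="my-autonomous-database"',
+        )
+        for backup in client.list_autonomous_database_backups(request=request):
+            print(backup.name)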
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    autonomous_database_backups: MutableSequence[
+        autonomous_db_backup.AutonomousDatabaseBackup
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=autonomous_db_backup.AutonomousDatabaseBackup,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py
new file mode 100644
index 000000000000..44104d291bd3
--- /dev/null
+++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py
@@ -0,0 +1,437 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.type import datetime_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.oracledatabase.v1",
+    manifest={
+        "CloudVmCluster",
+        "CloudVmClusterProperties",
+        "DataCollectionOptions",
+    },
+)
+
+
+class CloudVmCluster(proto.Message):
+    r"""Details of the Cloud VM Cluster resource.
+    https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/
+
+    Attributes:
+        name (str):
+            Identifier. The name of the VM Cluster resource with the
+            format:
+            projects/{project}/locations/{region}/cloudVmClusters/{cloud_vm_cluster}
+        exadata_infrastructure (str):
+            Required. The name of the Exadata Infrastructure resource
+            on which the VM cluster resource is created, in the
+            following format:
+            projects/{project}/locations/{region}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}
+        display_name (str):
+            Optional. User-friendly name for this
+            resource.
+        gcp_oracle_zone (str):
+            Output only. Google Cloud Platform location
+            where Oracle Exadata is hosted. It is the same
+            as the Google Cloud Platform Oracle zone of the
+            Exadata infrastructure.
+        properties (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties):
+            Optional. Various properties of the VM
+            Cluster.
+        labels (MutableMapping[str, str]):
+            Optional. Labels or tags associated with the
+            VM Cluster.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The date and time that the VM
+            cluster was created.
+        cidr (str):
+            Required. Network settings. CIDR to use for
+            cluster IP allocation.
+        backup_subnet_cidr (str):
+            Required. CIDR range of the backup subnet.
+        network (str):
+            Required. The name of the VPC network.
+ Format: + projects/{project}/global/networks/{network} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + exadata_infrastructure: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + gcp_oracle_zone: str = proto.Field( + proto.STRING, + number=12, + ) + properties: "CloudVmClusterProperties" = proto.Field( + proto.MESSAGE, + number=6, + message="CloudVmClusterProperties", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + cidr: str = proto.Field( + proto.STRING, + number=9, + ) + backup_subnet_cidr: str = proto.Field( + proto.STRING, + number=10, + ) + network: str = proto.Field( + proto.STRING, + number=11, + ) + + +class CloudVmClusterProperties(proto.Message): + r"""Various properties and settings associated with Exadata VM + cluster. + + Attributes: + ocid (str): + Output only. Oracle Cloud Infrastructure ID + of VM Cluster. + license_type (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.LicenseType): + Required. License type of VM Cluster. + gi_version (str): + Optional. Grid Infrastructure Version. + time_zone (google.type.datetime_pb2.TimeZone): + Optional. Time zone of VM Cluster to set. + Defaults to UTC if not specified. + ssh_public_keys (MutableSequence[str]): + Optional. SSH public keys to be stored with + cluster. + node_count (int): + Optional. Number of database servers. + shape (str): + Output only. Shape of VM Cluster. + ocpu_count (float): + Optional. OCPU count per VM. Minimum is 0.1. + memory_size_gb (int): + Optional. Memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per VM. + storage_size_gb (int): + Output only. The storage allocation for the + disk group, in gigabytes (GB). + data_storage_size_tb (float): + Optional. The data disk group size to be + allocated in TBs. + disk_redundancy (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.DiskRedundancy): + Optional. The type of redundancy. + sparse_diskgroup_enabled (bool): + Optional. Use exadata sparse snapshots. + local_backup_enabled (bool): + Optional. Use local backup. + hostname_prefix (str): + Optional. Prefix for VM cluster host names. + diagnostics_data_collection_options (google.cloud.oracledatabase_v1.types.DataCollectionOptions): + Optional. Data collection options for + diagnostics. + state (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.State): + Output only. State of the cluster. + scan_listener_port_tcp (int): + Output only. SCAN listener port - TCP + scan_listener_port_tcp_ssl (int): + Output only. SCAN listener port - TLS + domain (str): + Output only. Parent DNS domain where SCAN DNS + and hosts names are qualified. ex: + ocispdelegated.ocisp10jvnet.oraclevcn.com + scan_dns (str): + Output only. SCAN DNS name. + ex: + sp2-yi0xq-scan.ocispdelegated.ocisp10jvnet.oraclevcn.com + hostname (str): + Output only. host name without domain. format: + "-" with some suffix. ex: sp2-yi0xq where + "sp2" is the hostname_prefix. + cpu_core_count (int): + Required. Number of enabled CPU cores. + system_version (str): + Output only. Operating system version of the + image. + scan_ip_ids (MutableSequence[str]): + Output only. OCIDs of scan IPs. + scan_dns_record_id (str): + Output only. OCID of scan DNS record. + oci_url (str): + Output only. 
Deep link to the OCI console to + view this resource. + db_server_ocids (MutableSequence[str]): + Optional. OCID of database servers. + compartment_id (str): + Output only. Compartment ID of cluster. + dns_listener_ip (str): + Output only. DNS listener IP. + cluster_name (str): + Optional. OCI Cluster name. + """ + + class LicenseType(proto.Enum): + r"""Different licenses supported. + + Values: + LICENSE_TYPE_UNSPECIFIED (0): + Unspecified + LICENSE_INCLUDED (1): + License included part of offer + BRING_YOUR_OWN_LICENSE (2): + Bring your own license + """ + LICENSE_TYPE_UNSPECIFIED = 0 + LICENSE_INCLUDED = 1 + BRING_YOUR_OWN_LICENSE = 2 + + class DiskRedundancy(proto.Enum): + r"""Types of disk redundancy provided by Oracle. + + Values: + DISK_REDUNDANCY_UNSPECIFIED (0): + Unspecified. + HIGH (1): + High - 3 way mirror. + NORMAL (2): + Normal - 2 way mirror. + """ + DISK_REDUNDANCY_UNSPECIFIED = 0 + HIGH = 1 + NORMAL = 2 + + class State(proto.Enum): + r"""The various lifecycle states of the VM cluster. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the resource is in + provisioning state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UPDATING (3): + Indicates that the resource is in updating + state. + TERMINATING (4): + Indicates that the resource is in terminating + state. + TERMINATED (5): + Indicates that the resource is in terminated + state. + FAILED (6): + Indicates that the resource is in failed + state. + MAINTENANCE_IN_PROGRESS (7): + Indicates that the resource is in maintenance + in progress state. + """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + TERMINATING = 4 + TERMINATED = 5 + FAILED = 6 + MAINTENANCE_IN_PROGRESS = 7 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + license_type: LicenseType = proto.Field( + proto.ENUM, + number=2, + enum=LicenseType, + ) + gi_version: str = proto.Field( + proto.STRING, + number=3, + ) + time_zone: datetime_pb2.TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=datetime_pb2.TimeZone, + ) + ssh_public_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + node_count: int = proto.Field( + proto.INT32, + number=6, + ) + shape: str = proto.Field( + proto.STRING, + number=7, + ) + ocpu_count: float = proto.Field( + proto.FLOAT, + number=8, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=9, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=10, + ) + storage_size_gb: int = proto.Field( + proto.INT32, + number=11, + ) + data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=12, + ) + disk_redundancy: DiskRedundancy = proto.Field( + proto.ENUM, + number=13, + enum=DiskRedundancy, + ) + sparse_diskgroup_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + local_backup_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + hostname_prefix: str = proto.Field( + proto.STRING, + number=16, + ) + diagnostics_data_collection_options: "DataCollectionOptions" = proto.Field( + proto.MESSAGE, + number=19, + message="DataCollectionOptions", + ) + state: State = proto.Field( + proto.ENUM, + number=20, + enum=State, + ) + scan_listener_port_tcp: int = proto.Field( + proto.INT32, + number=21, + ) + scan_listener_port_tcp_ssl: int = proto.Field( + proto.INT32, + number=22, + ) + domain: str = proto.Field( + proto.STRING, + number=23, + ) + scan_dns: str = proto.Field( + proto.STRING, + number=24, + ) + hostname: str = 
proto.Field( + proto.STRING, + number=25, + ) + cpu_core_count: int = proto.Field( + proto.INT32, + number=26, + ) + system_version: str = proto.Field( + proto.STRING, + number=27, + ) + scan_ip_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + scan_dns_record_id: str = proto.Field( + proto.STRING, + number=29, + ) + oci_url: str = proto.Field( + proto.STRING, + number=30, + ) + db_server_ocids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=31, + ) + compartment_id: str = proto.Field( + proto.STRING, + number=32, + ) + dns_listener_ip: str = proto.Field( + proto.STRING, + number=35, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=36, + ) + + +class DataCollectionOptions(proto.Message): + r"""Data collection options for diagnostics. + + Attributes: + diagnostics_events_enabled (bool): + Optional. Indicates whether diagnostic + collection is enabled for the VM cluster + health_monitoring_enabled (bool): + Optional. Indicates whether health monitoring + is enabled for the VM cluster + incident_logs_enabled (bool): + Optional. Indicates whether incident logs and + trace collection are enabled for the VM cluster + """ + + diagnostics_events_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + health_monitoring_enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + incident_logs_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-oracledatabase/mypy.ini b/packages/google-cloud-oracledatabase/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-oracledatabase/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-oracledatabase/noxfile.py b/packages/google-cloud-oracledatabase/noxfile.py new file mode 100644 index 000000000000..aeee7851401a --- /dev/null +++ b/packages/google-cloud-oracledatabase/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
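+#
+# A sketch of typical local usage, assuming nox is installed in the current
+# environment (session and interpreter names come from the definitions
+# below and may differ on other branches):
+#
+#   python -m pip install nox
+#   nox -s lint blacken     # linters and formatter
+#   nox -s unit-3.10        # unit tests on a single interpreter
+#   nox -s docs             # build the HTML documentation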
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
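+    # (pyopenssl is only installed when the GOOGLE_API_USE_CLIENT_CERTIFICATE
+    # environment variable is set to "true", mirroring the check below.)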
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
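+    # The pattern below captures only the package name of each "name==version"
+    # pin (for example, a line like "proto-plus==1.22.3" yields "proto-plus");
+    # lines without an "==" pin, including comments, produce no match.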
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py new file mode 100644 index 000000000000..8812689ffc11 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + autonomous_database = oracledatabase_v1.AutonomousDatabase() + autonomous_database.network = "network_value" + autonomous_database.cidr = "cidr_value" + + request = oracledatabase_v1.CreateAutonomousDatabaseRequest( + parent="parent_value", + autonomous_database_id="autonomous_database_id_value", + autonomous_database=autonomous_database, + ) + + # Make the request + operation = client.create_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..298e7b566d84 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.CreateCloudExadataInfrastructureRequest( + parent="parent_value", + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + + # Make the request + operation = client.create_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..4f172f9ce515 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + cloud_vm_cluster = oracledatabase_v1.CloudVmCluster() + cloud_vm_cluster.exadata_infrastructure = "exadata_infrastructure_value" + cloud_vm_cluster.cidr = "cidr_value" + cloud_vm_cluster.backup_subnet_cidr = "backup_subnet_cidr_value" + cloud_vm_cluster.network = "network_value" + + request = oracledatabase_v1.CreateCloudVmClusterRequest( + parent="parent_value", + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + cloud_vm_cluster=cloud_vm_cluster, + ) + + # Make the request + operation = client.create_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py new file mode 100644 index 000000000000..44f792f2869d --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..bbbc8441a482 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..42bdd4641c93 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py new file mode 100644 index 000000000000..3f2cdc2849ae --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAutonomousDatabaseWallet +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_generate_autonomous_database_wallet(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GenerateAutonomousDatabaseWalletRequest( + name="name_value", + password="password_value", + ) + + # Make the request + response = client.generate_autonomous_database_wallet(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py new file mode 100644 index 000000000000..6273b25ca4e7 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_autonomous_database(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..abbfae94a61b --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_exadata_infrastructure(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..76e6c851c122 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_vm_cluster(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py new file mode 100644 index 000000000000..217be56f9f1e --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDatabaseBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_database_backups(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py new file mode 100644 index 000000000000..d33d179e389a --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDatabaseCharacterSets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_database_character_sets(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseCharacterSetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_character_sets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py new file mode 100644 index 000000000000..4153a3ad56f5 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_databases(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py new file mode 100644 index 000000000000..3ffeb9c12ee1 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDbVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_db_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDbVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_db_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py new file mode 100644 index 000000000000..9fa96f7b3216 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCloudExadataInfrastructures +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_cloud_exadata_infrastructures(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudExadataInfrastructuresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_exadata_infrastructures(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py new file mode 100644 index 000000000000..4d768bee445f --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCloudVmClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_cloud_vm_clusters(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudVmClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_vm_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py new file mode 100644 index 000000000000..5aeb00470993 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_nodes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py new file mode 100644 index 000000000000..7daf125eb879 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbServers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_servers(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_servers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py new file mode 100644 index 000000000000..851a38768000 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbSystemShapes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_system_shapes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbSystemShapesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_system_shapes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py new file mode 100644 index 000000000000..cf7ff1ebdfea --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntitlements +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_entitlements(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListEntitlementsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entitlements(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py new file mode 100644 index 000000000000..23cd229e7552 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGiVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_gi_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListGiVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gi_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py new file mode 100644 index 000000000000..d0e6cf445d21 --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_restore_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.RestoreAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.restore_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync] diff --git a/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json b/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json new file mode 100644 index 000000000000..f8cf3fba2f4a --- /dev/null +++ b/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json @@ -0,0 +1,1815 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.oracledatabase.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-oracledatabase", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateAutonomousDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "autonomous_database", + "type": "google.cloud.oracledatabase_v1.types.AutonomousDatabase" + }, + { + "name": "autonomous_database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_autonomous_database" + }, + "description": "Sample for CreateAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": 
"OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateCloudExadataInfrastructureRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cloud_exadata_infrastructure", + "type": "google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure" + }, + { + "name": "cloud_exadata_infrastructure_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cloud_exadata_infrastructure" + }, + "description": "Sample for CreateCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateCloudVmClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cloud_vm_cluster", + "type": "google.cloud.oracledatabase_v1.types.CloudVmCluster" + }, + { + "name": "cloud_vm_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cloud_vm_cluster" + }, + "description": "Sample for CreateCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.DeleteAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_autonomous_database" + }, + "description": "Sample for DeleteAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.DeleteCloudExadataInfrastructureRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cloud_exadata_infrastructure" + }, + "description": "Sample for DeleteCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.DeleteCloudVmClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cloud_vm_cluster" + }, + "description": "Sample for DeleteCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.generate_autonomous_database_wallet", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GenerateAutonomousDatabaseWallet", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GenerateAutonomousDatabaseWallet" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "type_", + "type": "google.cloud.oracledatabase_v1.types.GenerateType" + }, + { + "name": "is_regional", + "type": "bool" + }, + { + "name": "password", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletResponse", + "shortName": "generate_autonomous_database_wallet" + }, + "description": "Sample for GenerateAutonomousDatabaseWallet", + "file": 
"oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.AutonomousDatabase", + "shortName": "get_autonomous_database" + }, + "description": "Sample for GetAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetCloudExadataInfrastructureRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure", + "shortName": 
"get_cloud_exadata_infrastructure" + }, + "description": "Sample for GetCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetCloudVmClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.CloudVmCluster", + "shortName": "get_cloud_vm_cluster" + }, + "description": "Sample for GetCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_database_backups", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabaseBackups", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabaseBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseBackupsPager", + "shortName": "list_autonomous_database_backups" + }, + "description": "Sample for ListAutonomousDatabaseBackups", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_database_character_sets", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabaseCharacterSets", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabaseCharacterSets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseCharacterSetsPager", + "shortName": "list_autonomous_database_character_sets" + }, + "description": "Sample for ListAutonomousDatabaseCharacterSets", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_databases", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabases", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabases" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabasesPager", + "shortName": "list_autonomous_databases" + }, + "description": "Sample for ListAutonomousDatabases", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_db_versions", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDbVersions", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDbVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDbVersionsPager", + "shortName": "list_autonomous_db_versions" + }, + "description": "Sample for ListAutonomousDbVersions", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_cloud_exadata_infrastructures", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListCloudExadataInfrastructures", + "service": { + "fullName": 
"google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListCloudExadataInfrastructures" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudExadataInfrastructuresPager", + "shortName": "list_cloud_exadata_infrastructures" + }, + "description": "Sample for ListCloudExadataInfrastructures", + "file": "oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_cloud_vm_clusters", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListCloudVmClusters", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListCloudVmClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudVmClustersPager", + "shortName": "list_cloud_vm_clusters" + }, + "description": "Sample for ListCloudVmClusters", + "file": "oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": 
"google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_nodes", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbNodes", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbNodes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListDbNodesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbNodesPager", + "shortName": "list_db_nodes" + }, + "description": "Sample for ListDbNodes", + "file": "oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_servers", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbServers", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListDbServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbServersPager", + "shortName": "list_db_servers" + }, + "description": "Sample for ListDbServers", + "file": "oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_system_shapes", + "method": { + 
"fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbSystemShapes", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbSystemShapes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbSystemShapesPager", + "shortName": "list_db_system_shapes" + }, + "description": "Sample for ListDbSystemShapes", + "file": "oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_entitlements", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListEntitlements", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListEntitlements" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListEntitlementsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListEntitlementsPager", + "shortName": "list_entitlements" + }, + "description": "Sample for ListEntitlements", + "file": "oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_gi_versions", + "method": { + 
"fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListGiVersions", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListGiVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListGiVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListGiVersionsPager", + "shortName": "list_gi_versions" + }, + "description": "Sample for ListGiVersions", + "file": "oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.restore_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.RestoreAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "RestoreAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.RestoreAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "restore_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_autonomous_database" + }, + "description": "Sample for RestoreAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py" + } + ] +} diff --git a/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh new file mode 100755 index 
000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py b/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py new file mode 100644 index 000000000000..177c56933878 --- /dev/null +++ b/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py @@ -0,0 +1,197 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class oracledatabaseCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_autonomous_database': ('parent', 'autonomous_database_id', 'autonomous_database', 'request_id', ), + 'create_cloud_exadata_infrastructure': ('parent', 'cloud_exadata_infrastructure_id', 'cloud_exadata_infrastructure', 'request_id', ), + 'create_cloud_vm_cluster': ('parent', 'cloud_vm_cluster_id', 'cloud_vm_cluster', 'request_id', ), + 'delete_autonomous_database': ('name', 'request_id', ), + 'delete_cloud_exadata_infrastructure': ('name', 'request_id', 'force', ), + 'delete_cloud_vm_cluster': ('name', 'request_id', 'force', ), + 'generate_autonomous_database_wallet': ('name', 'password', 'type_', 'is_regional', ), + 'get_autonomous_database': ('name', ), + 'get_cloud_exadata_infrastructure': ('name', ), + 'get_cloud_vm_cluster': ('name', ), + 'list_autonomous_database_backups': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_autonomous_database_character_sets': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_autonomous_databases': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_autonomous_db_versions': ('parent', 'page_size', 'page_token', ), + 'list_cloud_exadata_infrastructures': ('parent', 'page_size', 'page_token', ), + 'list_cloud_vm_clusters': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_db_nodes': ('parent', 'page_size', 'page_token', ), + 'list_db_servers': ('parent', 'page_size', 'page_token', ), + 'list_db_system_shapes': ('parent', 'page_size', 'page_token', ), + 'list_entitlements': ('parent', 'page_size', 'page_token', ), + 'list_gi_versions': ('parent', 'page_size', 'page_token', ), + 'restore_autonomous_database': ('name', 'restore_time', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=oracledatabaseCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the oracledatabase client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-oracledatabase/setup.py b/packages/google-cloud-oracledatabase/setup.py new file mode 100644 index 000000000000..f4dfafa62eff --- /dev/null +++ b/packages/google-cloud-oracledatabase/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
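To make the rewrite performed by the fixup script above concrete, here is a small sketch that drives its transformer directly with libcst. It assumes the script is importable as a module (for example, with the scripts/ directory on sys.path); the method call, resource name, and retry argument are placeholders.

    import libcst as cst

    # Assumes scripts/ is on sys.path so the fixup module can be imported.
    from fixup_oracledatabase_v1_keywords import oracledatabaseCallTransformer

    # A flattened, positional call such as pre-existing sample code might contain.
    src = (
        "client.get_autonomous_database(\n"
        "    'projects/my-project/locations/us-east4/autonomousDatabases/my-adb',\n"
        "    retry=my_retry,\n"
        ")\n"
    )

    tree = cst.parse_module(src)
    updated = tree.visit(oracledatabaseCallTransformer())
    print(updated.code)
    # The rewritten call folds the positional argument into a single request dict,
    # while control parameters (retry, timeout, metadata) stay as keywords because
    # they are not fields of the request message; roughly:
    #   client.get_autonomous_database(request={'name': '...'}, retry=my_retry)
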
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-oracledatabase" + + +description = "Google Cloud Oracledatabase API client library" + +version = None + +with open( + os.path.join(package_root, "google/cloud/oracledatabase/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-oracledatabase/testing/.gitignore b/packages/google-cloud-oracledatabase/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
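As a quick illustration of the version handling in the setup.py above: the regular expression pulls the quoted semantic version out of gapic_version.py, and the leading digit selects the development-status classifier. The file content below is a stand-in for gapic_version.py, not its actual contents.

    import re

    # Stand-in for google/cloud/oracledatabase/gapic_version.py (illustrative only).
    gapic_version_contents = '__version__ = "0.1.0"\n'

    version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", gapic_version_contents)
    assert len(version_candidates) == 1
    version = version_candidates[0]

    # Same rule as setup.py: a 0.x version is published as Beta, otherwise Stable.
    release_status = (
        "Development Status :: 4 - Beta"
        if version[0] == "0"
        else "Development Status :: 5 - Production/Stable"
    )
    print(version, release_status)  # -> 0.1.0 Development Status :: 4 - Beta
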
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-oracledatabase/tests/__init__.py b/packages/google-cloud-oracledatabase/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-oracledatabase/tests/unit/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py new file mode 100644 index 000000000000..05cfe6d9f132 --- /dev/null +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py @@ -0,0 +1,10589 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.oracledatabase_v1.services.oracle_database import ( + OracleDatabaseClient, + pagers, + transports, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + common, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
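# A hedged sketch (not one of the generated tests): the pattern exercised
# throughout this module is a client constructed with anonymous credentials and
# the REST transport, which requires no real authentication and performs no
# network I/O at construction time. It relies on the OracleDatabaseClient,
# ga_credentials, and transports imports at the top of this module.
def _example_rest_client_construction():
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    assert isinstance(client.transport, transports.OracleDatabaseRestTransport)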
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert OracleDatabaseClient._get_default_mtls_endpoint(None) is None + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert OracleDatabaseClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + OracleDatabaseClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + OracleDatabaseClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert OracleDatabaseClient._get_client_cert_source(None, False) is None + assert ( + OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + OracleDatabaseClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + OracleDatabaseClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + OracleDatabaseClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "always") + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + OracleDatabaseClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + OracleDatabaseClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + OracleDatabaseClient._get_universe_domain(None, None) + == OracleDatabaseClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + OracleDatabaseClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) +def test_oracle_database_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) +def test_oracle_database_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com/" + ) + + +def test_oracle_database_client_get_transport_class(): + transport = OracleDatabaseClient.get_transport_class() + available_transports = [ + transports.OracleDatabaseRestTransport, + ] + assert transport in available_transports + + transport = OracleDatabaseClient.get_transport_class("rest") + assert transport == transports.OracleDatabaseRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +def test_oracle_database_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
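+ # Only the literal strings "true" and "false" are accepted for this variable;
+ # any other value fails client construction with the ValueError asserted below.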
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "true"), + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_oracle_database_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
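+ # With "true" the expected host switches to DEFAULT_MTLS_ENDPOINT and the
+ # callback is forwarded as client_cert_source_for_mtls; with "false" the
+ # default endpoint is kept and no client cert source reaches the transport.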
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OracleDatabaseClient), +) +def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
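+ # An explicitly supplied api_endpoint and client_cert_source take precedence,
+ # so get_mtls_endpoint_and_cert_source should return both unchanged.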
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +def test_oracle_database_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
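+ # The attribute is deleted (when present) to simulate environments, presumably
+ # older google-api-core releases, whose ClientOptions lacks universe_domain.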
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", None), + ], +) +def test_oracle_database_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudExadataInfrastructuresRequest, + dict, + ], +) +def test_list_cloud_exadata_infrastructures_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListCloudExadataInfrastructuresResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_cloud_exadata_infrastructures(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCloudExadataInfrastructuresPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_cloud_exadata_infrastructures + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_cloud_exadata_infrastructures + ] = mock_rpc + + request = {} + client.list_cloud_exadata_infrastructures(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_cloud_exadata_infrastructures(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_cloud_exadata_infrastructures_rest_required_fields( + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
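+ # i.e. whatever remains unset may only be the optional query parameters
+ # page_size / page_token checked in the assertion below.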
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_cloud_exadata_infrastructures(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_cloud_exadata_infrastructures_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_cloud_exadata_infrastructures._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cloud_exadata_infrastructures_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_cloud_exadata_infrastructures", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_cloud_exadata_infrastructures", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + oracledatabase.ListCloudExadataInfrastructuresRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json( + 
oracledatabase.ListCloudExadataInfrastructuresResponse() + ) + ) + + request = oracledatabase.ListCloudExadataInfrastructuresRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + + client.list_cloud_exadata_infrastructures( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cloud_exadata_infrastructures_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cloud_exadata_infrastructures(request) + + +def test_list_cloud_exadata_infrastructures_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_cloud_exadata_infrastructures(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) + + +def test_list_cloud_exadata_infrastructures_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
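+ # Mixing the two calling conventions is ambiguous, so the client raises
+ # ValueError up front and no HTTP request is made.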
+ with pytest.raises(ValueError): + client.list_cloud_exadata_infrastructures( + oracledatabase.ListCloudExadataInfrastructuresRequest(), + parent="parent_value", + ) + + +def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + ], + next_page_token="abc", + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[], + next_page_token="def", + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + ], + next_page_token="ghi", + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_cloud_exadata_infrastructures(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, exadata_infra.CloudExadataInfrastructure) for i in results + ) + + pages = list( + client.list_cloud_exadata_infrastructures(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudExadataInfrastructureRequest, + dict, + ], +) +def test_get_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = exadata_infra.CloudExadataInfrastructure( + name="name_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + entitlement_id="entitlement_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, exadata_infra.CloudExadataInfrastructure) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.entitlement_id == "entitlement_id_value" + + +def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cloud_exadata_infrastructure + ] = mock_rpc + + request = {} + client.get_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cloud_exadata_infrastructure(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_get_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_cloud_exadata_infrastructure" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GetCloudExadataInfrastructureRequest.pb( + oracledatabase.GetCloudExadataInfrastructureRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = exadata_infra.CloudExadataInfrastructure.to_json( + exadata_infra.CloudExadataInfrastructure() + ) + + request = oracledatabase.GetCloudExadataInfrastructureRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = exadata_infra.CloudExadataInfrastructure() + + client.get_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
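+ # A bare 400 status with no error payload is enough for the REST transport to
+ # surface core_exceptions.BadRequest, which is all this test asserts.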
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cloud_exadata_infrastructure(request) + + +def test_get_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) + + +def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cloud_exadata_infrastructure( + oracledatabase.GetCloudExadataInfrastructureRequest(), + name="name_value", + ) + + +def test_get_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudExadataInfrastructureRequest, + dict, + ], +) +def test_create_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_exadata_infrastructure"] = { + "name": "name_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "entitlement_id": "entitlement_id_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 1413, + "storage_count": 1405, + "total_storage_size_gb": 2234, + "available_storage_size_gb": 2615, + "maintenance_window": { + "preference": 1, + "months": [1], + "weeks_of_month": [1497, 1498], + "days_of_week": [1], + "hours_of_day": [1283, 1284], + "lead_time_week": 1455, + "patching_mode": 1, + "custom_action_timeout_mins": 2804, + "is_custom_action_timeout_enabled": True, + }, + "state": 1, + "shape": "shape_value", + "oci_url": "oci_url_value", + "cpu_count": 976, + "max_cpu_count": 1397, + "memory_size_gb": 1499, + "max_memory_gb": 1382, + "db_node_storage_size_gb": 2401, + "max_db_node_storage_size_gb": 2822, + "data_storage_size_tb": 0.2109, + "max_data_storage_tb": 0.19920000000000002, + "activated_storage_count": 2449, + "additional_storage_count": 2549, + "db_server_version": "db_server_version_value", + "storage_server_version": "storage_server_version_value", + "next_maintenance_run_id": "next_maintenance_run_id_value", + "next_maintenance_run_time": {"seconds": 751, "nanos": 543}, + "next_security_maintenance_run_time": {}, + "customer_contacts": [{"email": "email_value"}], + "monthly_storage_server_version": "monthly_storage_server_version_value", + "monthly_db_server_version": "monthly_db_server_version_value", + }, + "labels": {}, + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateCloudExadataInfrastructureRequest.meta.fields[ + "cloud_exadata_infrastructure" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
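+ # Both proto-plus and raw protobuf message types are handled below: the former
+ # exposes its fields via .meta.fields, the latter via .DESCRIPTOR.fields.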
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "cloud_exadata_infrastructure" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["cloud_exadata_infrastructure"][field]) + ): + del request_init["cloud_exadata_infrastructure"][field][i][subfield] + else: + del request_init["cloud_exadata_infrastructure"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. 
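+ # Create is a long-running method: the check is that the wrapped longrunning
+ # Operation's name round-trips through the mocked REST response.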
+ assert response.operation.name == "operations/spam" + + +def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cloud_exadata_infrastructure + ] = mock_rpc + + request = {} + client.create_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cloud_exadata_infrastructure_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "cloudExadataInfrastructureId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "cloudExadataInfrastructureId" in jsonified_request + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == request_init["cloud_exadata_infrastructure_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "cloudExadataInfrastructureId" + ] = "cloud_exadata_infrastructure_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "cloud_exadata_infrastructure_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "cloudExadataInfrastructureId" in jsonified_request + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == "cloud_exadata_infrastructure_id_value" + ) + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cloud_exadata_infrastructure(request) + + expected_params = [ + ( + "cloudExadataInfrastructureId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "cloudExadataInfrastructureId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudExadataInfrastructureId", + "cloudExadataInfrastructure", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_create_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_create_cloud_exadata_infrastructure", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + oracledatabase.CreateCloudExadataInfrastructureRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.CreateCloudExadataInfrastructureRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cloud_exadata_infrastructure(request) + + +def test_create_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) + + +def test_create_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_cloud_exadata_infrastructure( + oracledatabase.CreateCloudExadataInfrastructureRequest(), + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + + +def test_create_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + dict, + ], +) +def test_delete_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_exadata_infrastructure + ] = mock_rpc + + request = {} + client.delete_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cloud_exadata_infrastructure(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_delete_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_delete_cloud_exadata_infrastructure", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + oracledatabase.DeleteCloudExadataInfrastructureRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.DeleteCloudExadataInfrastructureRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cloud_exadata_infrastructure(request) + + +def test_delete_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cloud_exadata_infrastructure( + oracledatabase.DeleteCloudExadataInfrastructureRequest(), + name="name_value", + ) + + +def test_delete_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudVmClustersRequest, + dict, + ], +) +def test_list_cloud_vm_clusters_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListCloudVmClustersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_cloud_vm_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCloudVmClustersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_cloud_vm_clusters + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_cloud_vm_clusters + ] = mock_rpc + + request = {} + client.list_cloud_vm_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_cloud_vm_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_cloud_vm_clusters_rest_required_fields( + request_type=oracledatabase.ListCloudVmClustersRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListCloudVmClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_cloud_vm_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_cloud_vm_clusters_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_cloud_vm_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cloud_vm_clusters_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_cloud_vm_clusters" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_cloud_vm_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListCloudVmClustersRequest.pb( + oracledatabase.ListCloudVmClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListCloudVmClustersResponse.to_json( + oracledatabase.ListCloudVmClustersResponse() + ) + + request = oracledatabase.ListCloudVmClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListCloudVmClustersResponse() + + client.list_cloud_vm_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cloud_vm_clusters_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListCloudVmClustersRequest +): + client = OracleDatabaseClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cloud_vm_clusters(request) + + +def test_list_cloud_vm_clusters_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudVmClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_cloud_vm_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) + + +def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_cloud_vm_clusters( + oracledatabase.ListCloudVmClustersRequest(), + parent="parent_value", + ) + + +def test_list_cloud_vm_clusters_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
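+ # The fake pages set up below contain 3, 0, 1, and 2 CloudVmCluster resources; the pager is expected to flatten them into 6 results and to surface the page tokens in order.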
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + ], + next_page_token="abc", + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[], + next_page_token="def", + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + ], + next_page_token="ghi", + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListCloudVmClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_cloud_vm_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vm_cluster.CloudVmCluster) for i in results) + + pages = list(client.list_cloud_vm_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudVmClusterRequest, + dict, + ], +) +def test_get_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster( + name="name_value", + exadata_infrastructure="exadata_infrastructure_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + cidr="cidr_value", + backup_subnet_cidr="backup_subnet_cidr_value", + network="network_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vm_cluster.CloudVmCluster) + assert response.name == "name_value" + assert response.exadata_infrastructure == "exadata_infrastructure_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.cidr == "cidr_value" + assert response.backup_subnet_cidr == "backup_subnet_cidr_value" + assert response.network == "network_value" + + +def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cloud_vm_cluster in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cloud_vm_cluster + ] = mock_rpc + + request = {} + client.get_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.GetCloudVmClusterRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cloud_vm_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_cloud_vm_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GetCloudVmClusterRequest.pb( + oracledatabase.GetCloudVmClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vm_cluster.CloudVmCluster.to_json( + vm_cluster.CloudVmCluster() + ) + + request = oracledatabase.GetCloudVmClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vm_cluster.CloudVmCluster() + + client.get_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetCloudVmClusterRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cloud_vm_cluster(request) + + +def test_get_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cloud_vm_cluster( + oracledatabase.GetCloudVmClusterRequest(), + name="name_value", + ) + + +def test_get_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudVmClusterRequest, + dict, + ], +) +def test_create_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_vm_cluster"] = { + "name": "name_value", + "exadata_infrastructure": "exadata_infrastructure_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "properties": { + "ocid": "ocid_value", + "license_type": 1, + "gi_version": "gi_version_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "ssh_public_keys": ["ssh_public_keys_value1", "ssh_public_keys_value2"], + "node_count": 1070, + "shape": "shape_value", + "ocpu_count": 0.1087, + "memory_size_gb": 1499, + "db_node_storage_size_gb": 2401, + "storage_size_gb": 1591, + "data_storage_size_tb": 0.2109, + "disk_redundancy": 1, + "sparse_diskgroup_enabled": True, + "local_backup_enabled": True, + "hostname_prefix": "hostname_prefix_value", + "diagnostics_data_collection_options": { + "diagnostics_events_enabled": True, + "health_monitoring_enabled": True, + "incident_logs_enabled": True, + }, + "state": 1, + "scan_listener_port_tcp": 2356, + "scan_listener_port_tcp_ssl": 2789, + "domain": "domain_value", + "scan_dns": "scan_dns_value", + "hostname": "hostname_value", + "cpu_core_count": 1496, + "system_version": "system_version_value", + "scan_ip_ids": ["scan_ip_ids_value1", "scan_ip_ids_value2"], + "scan_dns_record_id": "scan_dns_record_id_value", + "oci_url": "oci_url_value", + "db_server_ocids": ["db_server_ocids_value1", "db_server_ocids_value2"], + "compartment_id": "compartment_id_value", + "dns_listener_ip": "dns_listener_ip_value", + "cluster_name": "cluster_name_value", + }, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "cidr": "cidr_value", + "backup_subnet_cidr": "backup_subnet_cidr_value", + "network": "network_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateCloudVmClusterRequest.meta.fields[ + "cloud_vm_cluster" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
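+ # proto-plus message types expose their fields via `meta.fields`, while vanilla protobuf types expose them via `DESCRIPTOR.fields`; the presence of a DESCRIPTOR attribute is used below to tell the two apart.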
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cloud_vm_cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cloud_vm_cluster"][field])): + del request_init["cloud_vm_cluster"][field][i][subfield] + else: + del request_init["cloud_vm_cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cloud_vm_cluster + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cloud_vm_cluster + ] = mock_rpc + + request = {} + client.create_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.CreateCloudVmClusterRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cloud_vm_cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "cloudVmClusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "cloudVmClusterId" in jsonified_request + assert jsonified_request["cloudVmClusterId"] == request_init["cloud_vm_cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["cloudVmClusterId"] = "cloud_vm_cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cloud_vm_cluster_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "cloudVmClusterId" in jsonified_request + assert jsonified_request["cloudVmClusterId"] == "cloud_vm_cluster_id_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
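+ # "cloudVmClusterId" is a required query parameter, so the expected params checked below include it (with its empty default value) alongside the standard "$alt" parameter.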
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cloud_vm_cluster(request) + + expected_params = [ + ( + "cloudVmClusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "cloudVmClusterId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudVmClusterId", + "cloudVmCluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_cloud_vm_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.CreateCloudVmClusterRequest.pb( + oracledatabase.CreateCloudVmClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.CreateCloudVmClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateCloudVmClusterRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy 
transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cloud_vm_cluster(request) + + +def test_create_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) + + +def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cloud_vm_cluster( + oracledatabase.CreateCloudVmClusterRequest(), + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + ) + + +def test_create_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudVmClusterRequest, + dict, + ], +) +def test_delete_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cloud_vm_cluster + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_vm_cluster + ] = mock_rpc + + request = {} + client.delete_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.DeleteCloudVmClusterRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cloud_vm_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_vm_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteCloudVmClusterRequest.pb( + oracledatabase.DeleteCloudVmClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.DeleteCloudVmClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteCloudVmClusterRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cloud_vm_cluster(request) + + +def test_delete_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cloud_vm_cluster( + oracledatabase.DeleteCloudVmClusterRequest(), + name="name_value", + ) + + +def test_delete_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListEntitlementsRequest, + dict, + ], +) +def test_list_entitlements_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListEntitlementsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_entitlements(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntitlementsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_entitlements_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entitlements in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_entitlements + ] = mock_rpc + + request = {} + client.list_entitlements(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entitlements(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entitlements_rest_required_fields( + request_type=oracledatabase.ListEntitlementsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListEntitlementsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_entitlements(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_entitlements_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_entitlements._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entitlements_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_entitlements" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_entitlements" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListEntitlementsRequest.pb( + oracledatabase.ListEntitlementsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListEntitlementsResponse.to_json( + oracledatabase.ListEntitlementsResponse() + ) + + request = oracledatabase.ListEntitlementsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListEntitlementsResponse() + + client.list_entitlements( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_entitlements_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListEntitlementsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will 
satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_entitlements(request) + + +def test_list_entitlements_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListEntitlementsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_entitlements(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/entitlements" + % client.transport._host, + args[1], + ) + + +def test_list_entitlements_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entitlements( + oracledatabase.ListEntitlementsRequest(), + parent="parent_value", + ) + + +def test_list_entitlements_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + entitlement.Entitlement(), + entitlement.Entitlement(), + ], + next_page_token="abc", + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[], + next_page_token="def", + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + ], + next_page_token="ghi", + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + entitlement.Entitlement(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListEntitlementsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_entitlements(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, entitlement.Entitlement) for i in results) + + pages = list(client.list_entitlements(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbServersRequest, + dict, + ], +) +def test_list_db_servers_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_db_servers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDbServersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_db_servers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_db_servers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_db_servers] = mock_rpc + + request = {} + client.list_db_servers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_db_servers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_servers_rest_required_fields( + request_type=oracledatabase.ListDbServersRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
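+ # For reference (illustrative only, not asserted here): with the real
+ # path_template.transcode, this RPC resolves against the http rule
+ # "/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers"
+ # (see the *_rest_flattened test below), with any remaining optional fields
+ # landing in query_params. The hard-coded transcode_result that follows
+ # sidesteps that resolution so the test can focus on which query params the
+ # transport actually sends.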
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_db_servers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_db_servers_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_db_servers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_servers_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_servers" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbServersRequest.pb( + oracledatabase.ListDbServersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbServersResponse.to_json( + oracledatabase.ListDbServersResponse() + ) + + request = oracledatabase.ListDbServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbServersResponse() + + client.list_db_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_servers_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbServersRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
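+ # Note (illustrative): google.api_core maps an HTTP 400 response to
+ # core_exceptions.BadRequest, so application code would typically handle it as:
+ #     try:
+ #         client.list_db_servers(request)
+ #     except core_exceptions.BadRequest:
+ #         ...  # inspect/repair the request before retrying
+ # The pytest.raises context below asserts exactly that mapping.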
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_servers(request) + + +def test_list_db_servers_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_db_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers" + % client.transport._host, + args[1], + ) + + +def test_list_db_servers_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_servers( + oracledatabase.ListDbServersRequest(), + parent="parent_value", + ) + + +def test_list_db_servers_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + db_server.DbServer(), + db_server.DbServer(), + ], + next_page_token="abc", + ), + oracledatabase.ListDbServersResponse( + db_servers=[], + next_page_token="def", + ), + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + ], + next_page_token="ghi", + ), + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + db_server.DbServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListDbServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } + + pager = client.list_db_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_server.DbServer) for i in results) + + pages = list(client.list_db_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbNodesRequest, + dict, + ], +) +def test_list_db_nodes_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_db_nodes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDbNodesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_db_nodes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_db_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_db_nodes] = mock_rpc + + request = {} + client.list_db_nodes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_db_nodes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_nodes_rest_required_fields( + request_type=oracledatabase.ListDbNodesRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
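+ # Related detail: for this RPC "parent" is the required field (normally bound
+ # into the URI path), while the optional paging fields keep their JSON names,
+ # "pageSize" and "pageToken", as query parameters -- this split is what the
+ # *_rest_unset_required_fields test for this method checks.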
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_db_nodes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_db_nodes_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_db_nodes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_nodes_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_nodes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_nodes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbNodesRequest.pb( + oracledatabase.ListDbNodesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbNodesResponse.to_json( + oracledatabase.ListDbNodesResponse() + ) + + request = oracledatabase.ListDbNodesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbNodesResponse() + + client.list_db_nodes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_nodes_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbNodesRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_nodes(request) + + +def test_list_db_nodes_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_db_nodes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes" + % client.transport._host, + args[1], + ) + + +def test_list_db_nodes_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_nodes( + oracledatabase.ListDbNodesRequest(), + parent="parent_value", + ) + + +def test_list_db_nodes_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + db_node.DbNode(), + db_node.DbNode(), + ], + next_page_token="abc", + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[], + next_page_token="def", + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + ], + next_page_token="ghi", + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + db_node.DbNode(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListDbNodesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } + + pager = client.list_db_nodes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_node.DbNode) for i in results) + + pages = list(client.list_db_nodes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListGiVersionsRequest, + dict, + ], +) +def test_list_gi_versions_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_gi_versions(request) + + # Establish that the response is the type that we expect. 
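+ # The pager returned here wraps the first response; iterating it (or its
+ # .pages attribute) lazily issues follow-up requests driven by
+ # next_page_token, as exercised by the *_rest_pager tests in this module.
+ # Typical usage (illustrative):
+ #     for gi_ver in client.list_gi_versions(request=request):
+ #         ...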
+ assert isinstance(response, pagers.ListGiVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_gi_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_gi_versions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gi_versions + ] = mock_rpc + + request = {} + client.list_gi_versions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_gi_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_gi_versions_rest_required_fields( + request_type=oracledatabase.ListGiVersionsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_gi_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_gi_versions_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_gi_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_gi_versions_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_gi_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_gi_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListGiVersionsRequest.pb( + oracledatabase.ListGiVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListGiVersionsResponse.to_json( + oracledatabase.ListGiVersionsResponse() + ) + + request = oracledatabase.ListGiVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListGiVersionsResponse() + + client.list_gi_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_gi_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListGiVersionsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_gi_versions(request) + + +def test_list_gi_versions_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_gi_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/giVersions" % client.transport._host, + args[1], + ) + + +def test_list_gi_versions_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_gi_versions( + oracledatabase.ListGiVersionsRequest(), + parent="parent_value", + ) + + +def test_list_gi_versions_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + gi_version.GiVersion(), + gi_version.GiVersion(), + ], + next_page_token="abc", + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[], + next_page_token="def", + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + ], + next_page_token="ghi", + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + gi_version.GiVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListGiVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_gi_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, gi_version.GiVersion) for i in results) + + pages = list(client.list_gi_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbSystemShapesRequest, + dict, + ], +) +def test_list_db_system_shapes_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_db_system_shapes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDbSystemShapesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_db_system_shapes + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_db_system_shapes + ] = mock_rpc + + request = {} + client.list_db_system_shapes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_db_system_shapes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_system_shapes_rest_required_fields( + request_type=oracledatabase.ListDbSystemShapesRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
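+ # Aside from the request's own fields, the only query parameter the REST
+ # transport is expected to add on its own is the "$alt" system parameter,
+ # pinned by the expected_params assertion below to "json;enum-encoding=int"
+ # (JSON responses with enum values encoded as integers -- our reading of the
+ # flag, stated here as an assumption).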
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_db_system_shapes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_db_system_shapes_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_db_system_shapes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_system_shapes_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_system_shapes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_system_shapes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbSystemShapesRequest.pb( + oracledatabase.ListDbSystemShapesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbSystemShapesResponse.to_json( + oracledatabase.ListDbSystemShapesResponse() + ) + + request = oracledatabase.ListDbSystemShapesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbSystemShapesResponse() + + client.list_db_system_shapes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_system_shapes_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbSystemShapesRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_system_shapes(request) + + +def test_list_db_system_shapes_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_db_system_shapes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dbSystemShapes" + % client.transport._host, + args[1], + ) + + +def test_list_db_system_shapes_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_system_shapes( + oracledatabase.ListDbSystemShapesRequest(), + parent="parent_value", + ) + + +def test_list_db_system_shapes_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + ], + next_page_token="abc", + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[], + next_page_token="def", + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + ], + next_page_token="ghi", + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListDbSystemShapesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_db_system_shapes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_system_shape.DbSystemShape) for i in results) + + pages = list(client.list_db_system_shapes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabasesRequest, + dict, + ], +) +def test_list_autonomous_databases_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_databases(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutonomousDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_databases + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_databases + ] = mock_rpc + + request = {} + client.list_autonomous_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_autonomous_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_databases_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabasesRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
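+            # With transcode() stubbed this way, the test can focus on the
+            # request parameters that reach Session.request; only the "$alt"
+            # system parameter is expected for this GET call.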
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_databases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_databases_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_autonomous_databases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_databases_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_databases" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_databases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDatabasesRequest.pb( + oracledatabase.ListAutonomousDatabasesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDatabasesResponse.to_json( + oracledatabase.ListAutonomousDatabasesResponse() + ) + ) + + request = oracledatabase.ListAutonomousDatabasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabasesResponse() + + client.list_autonomous_databases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_databases_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDatabasesRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_databases(request) + + +def test_list_autonomous_databases_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) + + +def test_list_autonomous_databases_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_databases( + oracledatabase.ListAutonomousDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_autonomous_databases_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + ], + next_page_token="abc", + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[], + next_page_token="def", + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + ], + next_page_token="ghi", + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListAutonomousDatabasesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autonomous_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, autonomous_database.AutonomousDatabase) for i in results + ) + + pages = list(client.list_autonomous_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetAutonomousDatabaseRequest, + dict, + ], +) +def test_get_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase( + name="name_value", + database="database_value", + display_name="display_name_value", + entitlement_id="entitlement_id_value", + admin_password="admin_password_value", + network="network_value", + cidr="cidr_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_autonomous_database(request) + + # Establish that the response is the type that we expect. 
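+    # The scalar fields set on the mocked AutonomousDatabase above should
+    # survive the proto -> JSON -> proto round trip done by the REST transport.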
+ assert isinstance(response, autonomous_database.AutonomousDatabase) + assert response.name == "name_value" + assert response.database == "database_value" + assert response.display_name == "display_name_value" + assert response.entitlement_id == "entitlement_id_value" + assert response.admin_password == "admin_password_value" + assert response.network == "network_value" + assert response.cidr == "cidr_value" + + +def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_autonomous_database + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_autonomous_database + ] = mock_rpc + + request = {} + client.get_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_autonomous_database_rest_required_fields( + request_type=oracledatabase.GetAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_autonomous_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_autonomous_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GetAutonomousDatabaseRequest.pb( + oracledatabase.GetAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = autonomous_database.AutonomousDatabase.to_json( + autonomous_database.AutonomousDatabase() + ) + + request = oracledatabase.GetAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = autonomous_database.AutonomousDatabase() + + client.get_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetAutonomousDatabaseRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_autonomous_database(request) + + +def test_get_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) + + +def test_get_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
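+    # The client is expected to raise ValueError for this combination before
+    # any HTTP request is attempted.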
+ with pytest.raises(ValueError): + client.get_autonomous_database( + oracledatabase.GetAutonomousDatabaseRequest(), + name="name_value", + ) + + +def test_get_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateAutonomousDatabaseRequest, + dict, + ], +) +def test_create_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["autonomous_database"] = { + "name": "name_value", + "database": "database_value", + "display_name": "display_name_value", + "entitlement_id": "entitlement_id_value", + "admin_password": "admin_password_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 0.1413, + "cpu_core_count": 1496, + "data_storage_size_tb": 2109, + "data_storage_size_gb": 2096, + "db_workload": 1, + "db_edition": 1, + "character_set": "character_set_value", + "n_character_set": "n_character_set_value", + "private_endpoint_ip": "private_endpoint_ip_value", + "private_endpoint_label": "private_endpoint_label_value", + "db_version": "db_version_value", + "is_auto_scaling_enabled": True, + "is_storage_auto_scaling_enabled": True, + "license_type": 1, + "customer_contacts": [{"email": "email_value"}], + "secret_id": "secret_id_value", + "vault_id": "vault_id_value", + "maintenance_schedule_type": 1, + "mtls_connection_required": True, + "backup_retention_period_days": 2975, + "actual_used_data_storage_size_tb": 0.3366, + "allocated_storage_size_tb": 0.2636, + "apex_details": { + "apex_version": "apex_version_value", + "ords_version": "ords_version_value", + }, + "are_primary_allowlisted_ips_used": True, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "autonomous_container_database_id": "autonomous_container_database_id_value", + "available_upgrade_versions": [ + "available_upgrade_versions_value1", + "available_upgrade_versions_value2", + ], + "connection_strings": { + "all_connection_strings": { + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + }, + "dedicated": "dedicated_value", + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + "profiles": [ + { + "consumer_group": 1, + "display_name": "display_name_value", + "host_format": 1, + "is_regional": True, + "protocol": 1, + "session_mode": 1, + "syntax_format": 1, + "tls_authentication": 1, + "value": "value_value", + } + ], + }, + "connection_urls": { + "apex_uri": "apex_uri_value", + "database_transforms_uri": "database_transforms_uri_value", + "graph_studio_uri": "graph_studio_uri_value", + "machine_learning_notebook_uri": "machine_learning_notebook_uri_value", + "machine_learning_user_management_uri": "machine_learning_user_management_uri_value", + "mongo_db_uri": "mongo_db_uri_value", + "ords_uri": "ords_uri_value", + "sql_dev_web_uri": "sql_dev_web_uri_value", + }, + "failed_data_recovery_duration": {"seconds": 751, "nanos": 543}, + "memory_table_gbs": 1691, + "is_local_data_guard_enabled": True, + "local_adg_auto_failover_max_data_loss_limit": 4513, + "local_standby_db": { + "lag_time_duration": {}, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "data_guard_role_changed_time": {"seconds": 751, "nanos": 543}, + "disaster_recovery_role_changed_time": {}, + }, + 
"memory_per_oracle_compute_unit_gbs": 3626, + "local_disaster_recovery_type": 1, + "data_safe_state": 1, + "database_management_state": 1, + "open_mode": 1, + "operations_insights_state": 1, + "peer_db_ids": ["peer_db_ids_value1", "peer_db_ids_value2"], + "permission_level": 1, + "private_endpoint": "private_endpoint_value", + "refreshable_mode": 1, + "refreshable_state": 1, + "role": 1, + "scheduled_operation_details": [ + { + "day_of_week": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "stop_time": {}, + } + ], + "sql_web_developer_url": "sql_web_developer_url_value", + "supported_clone_regions": [ + "supported_clone_regions_value1", + "supported_clone_regions_value2", + ], + "used_data_storage_size_tbs": 2752, + "oci_url": "oci_url_value", + "total_auto_backup_storage_size_gbs": 0.36100000000000004, + "next_long_term_backup_time": {}, + "maintenance_begin_time": {}, + "maintenance_end_time": {}, + }, + "labels": {}, + "network": "network_value", + "cidr": "cidr_value", + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateAutonomousDatabaseRequest.meta.fields[ + "autonomous_database" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autonomous_database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if 
field_repeated: + for i in range(0, len(request_init["autonomous_database"][field])): + del request_init["autonomous_database"][field][i][subfield] + else: + del request_init["autonomous_database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_autonomous_database(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_autonomous_database + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_autonomous_database + ] = mock_rpc + + request = {} + client.create_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
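+        # mock_rpc stands in for the cached wrapper here, so the call above
+        # should have reached it exactly once; the second invocation further
+        # below is expected to reuse the cache rather than call wrap_method again.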
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_autonomous_database_rest_required_fields( + request_type=oracledatabase.CreateAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["autonomous_database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "autonomousDatabaseId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "autonomousDatabaseId" in jsonified_request + assert ( + jsonified_request["autonomousDatabaseId"] + == request_init["autonomous_database_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["autonomousDatabaseId"] = "autonomous_database_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "autonomous_database_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "autonomousDatabaseId" in jsonified_request + assert jsonified_request["autonomousDatabaseId"] == "autonomous_database_id_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_autonomous_database(request) + + expected_params = [ + ( + "autonomousDatabaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "autonomousDatabaseId", + "requestId", + ) + ) + & set( + ( + "parent", + "autonomousDatabaseId", + "autonomousDatabase", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_autonomous_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.CreateAutonomousDatabaseRequest.pb( + oracledatabase.CreateAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.CreateAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateAutonomousDatabaseRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_autonomous_database(request) + + +def test_create_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) + + +def test_create_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_autonomous_database( + oracledatabase.CreateAutonomousDatabaseRequest(), + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", + ) + + +def test_create_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteAutonomousDatabaseRequest, + dict, + ], +) +def test_delete_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
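+        # DeleteAutonomousDatabase is a long-running operation, so the fake
+        # response is a longrunning Operation; only its name is asserted below.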
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_autonomous_database(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_autonomous_database + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_autonomous_database + ] = mock_rpc + + request = {} + client.delete_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_autonomous_database_rest_required_fields( + request_type=oracledatabase.DeleteAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
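+    # transcode() is stubbed to a bodyless DELETE below, so only the "$alt"
+    # system parameter is expected among the outgoing query parameters.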
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_autonomous_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_autonomous_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteAutonomousDatabaseRequest.pb( + oracledatabase.DeleteAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.DeleteAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteAutonomousDatabaseRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_autonomous_database(request) + + +def test_delete_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_autonomous_database( + oracledatabase.DeleteAutonomousDatabaseRequest(), + name="name_value", + ) + + +def test_delete_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.RestoreAutonomousDatabaseRequest, + dict, + ], +) +def test_restore_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_autonomous_database(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.restore_autonomous_database + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.restore_autonomous_database + ] = mock_rpc + + request = {} + client.restore_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_autonomous_database_rest_required_fields( + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_autonomous_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "restoreTime", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_restore_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_restore_autonomous_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.RestoreAutonomousDatabaseRequest.pb( + oracledatabase.RestoreAutonomousDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = oracledatabase.RestoreAutonomousDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_autonomous_database_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": 
"projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_autonomous_database(request) + + +def test_restore_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + restore_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restore_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore" + % client.transport._host, + args[1], + ) + + +def test_restore_autonomous_database_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_autonomous_database( + oracledatabase.RestoreAutonomousDatabaseRequest(), + name="name_value", + restore_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_restore_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + dict, + ], +) +def test_generate_autonomous_database_wallet_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse( + archive_content=b"archive_content_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_autonomous_database_wallet(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, oracledatabase.GenerateAutonomousDatabaseWalletResponse) + assert response.archive_content == b"archive_content_blob" + + +def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_autonomous_database_wallet + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_autonomous_database_wallet + ] = mock_rpc + + request = {} + client.generate_autonomous_database_wallet(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_autonomous_database_wallet(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_autonomous_database_wallet_rest_required_fields( + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request_init["password"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["password"] = "password_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "password" in jsonified_request + assert jsonified_request["password"] == "password_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_autonomous_database_wallet(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_autonomous_database_wallet_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.generate_autonomous_database_wallet._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "password", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_autonomous_database_wallet_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_generate_autonomous_database_wallet", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_generate_autonomous_database_wallet", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + oracledatabase.GenerateAutonomousDatabaseWalletRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.GenerateAutonomousDatabaseWalletResponse.to_json( + oracledatabase.GenerateAutonomousDatabaseWalletResponse() + ) + ) + + request = oracledatabase.GenerateAutonomousDatabaseWalletRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + + client.generate_autonomous_database_wallet( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_autonomous_database_wallet_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_autonomous_database_wallet(request) + + +def test_generate_autonomous_database_wallet_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + type_=autonomous_database.GenerateType.ALL, + is_regional=True, + password="password_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_autonomous_database_wallet(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet" + % client.transport._host, + args[1], + ) + + +def test_generate_autonomous_database_wallet_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_autonomous_database_wallet( + oracledatabase.GenerateAutonomousDatabaseWalletRequest(), + name="name_value", + type_=autonomous_database.GenerateType.ALL, + is_regional=True, + password="password_value", + ) + + +def test_generate_autonomous_database_wallet_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDbVersionsRequest, + dict, + ], +) +def test_list_autonomous_db_versions_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
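+        # Only next_page_token is populated on the canned response; the
+        # assertions below check that the call returns a pager surfacing it.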
+ return_value = oracledatabase.ListAutonomousDbVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_db_versions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutonomousDbVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_db_versions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_db_versions + ] = mock_rpc + + request = {} + client.list_autonomous_db_versions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_autonomous_db_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_db_versions_rest_required_fields( + request_type=oracledatabase.ListAutonomousDbVersionsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
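+    # i.e. every field still reported as unset must be one of the paging query
+    # parameters, so no path or body field leaks into the query params.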
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDbVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_db_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_db_versions_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_autonomous_db_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_db_versions_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_db_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_db_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDbVersionsRequest.pb( + oracledatabase.ListAutonomousDbVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDbVersionsResponse.to_json( + oracledatabase.ListAutonomousDbVersionsResponse() + ) + ) + + request = 
oracledatabase.ListAutonomousDbVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDbVersionsResponse() + + client.list_autonomous_db_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_db_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDbVersionsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_db_versions(request) + + +def test_list_autonomous_db_versions_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDbVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_db_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDbVersions" + % client.transport._host, + args[1], + ) + + +def test_list_autonomous_db_versions_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_db_versions( + oracledatabase.ListAutonomousDbVersionsRequest(), + parent="parent_value", + ) + + +def test_list_autonomous_db_versions_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + ], + next_page_token="abc", + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[], + next_page_token="def", + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + ], + next_page_token="ghi", + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListAutonomousDbVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autonomous_db_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, autonomous_db_version.AutonomousDbVersion) for i in results + ) + + pages = list(client.list_autonomous_db_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + dict, + ], +) +def test_list_autonomous_database_character_sets_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_database_character_sets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutonomousDatabaseCharacterSetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_database_character_sets + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_character_sets + ] = mock_rpc + + request = {} + client.list_autonomous_database_character_sets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_autonomous_database_character_sets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_database_character_sets_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_database_character_sets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_database_character_sets_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_autonomous_database_character_sets._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_database_character_sets_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_character_sets", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_autonomous_database_character_sets", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + ) + ) + + request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + + client.list_autonomous_database_character_sets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_database_character_sets_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a 
request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_database_character_sets(request) + + +def test_list_autonomous_database_character_sets_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_database_character_sets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets" + % client.transport._host, + args[1], + ) + + +def test_list_autonomous_database_character_sets_rest_flattened_error( + transport: str = "rest", +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_database_character_sets( + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(), + parent="parent_value", + ) + + +def test_list_autonomous_database_character_sets_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + next_page_token="abc", + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[], + next_page_token="def", + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + next_page_token="ghi", + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autonomous_database_character_sets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance( + i, autonomous_database_character_set.AutonomousDatabaseCharacterSet + ) + for i in results + ) + + pages = list( + client.list_autonomous_database_character_sets(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseBackupsRequest, + dict, + ], +) +def test_list_autonomous_database_backups_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autonomous_database_backups(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutonomousDatabaseBackupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_autonomous_database_backups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_backups + ] = mock_rpc + + request = {} + client.list_autonomous_database_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_autonomous_database_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_database_backups_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_autonomous_database_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_autonomous_database_backups_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_autonomous_database_backups._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_database_backups_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_backups", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb( + oracledatabase.ListAutonomousDatabaseBackupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json( + oracledatabase.ListAutonomousDatabaseBackupsResponse() + ) + ) + + request = oracledatabase.ListAutonomousDatabaseBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() + + client.list_autonomous_database_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_database_backups_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
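+    # A bare 400 status on the mocked Session is enough: the client should
+    # surface it as core_exceptions.BadRequest even without an error body.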
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_database_backups(request) + + +def test_list_autonomous_database_backups_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autonomous_database_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups" + % client.transport._host, + args[1], + ) + + +def test_list_autonomous_database_backups_rest_flattened_error(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_database_backups( + oracledatabase.ListAutonomousDatabaseBackupsRequest(), + parent="parent_value", + ) + + +def test_list_autonomous_database_backups_rest_pager(transport: str = "rest"): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDatabaseBackupsResponse( + autonomous_database_backups=[ + autonomous_db_backup.AutonomousDatabaseBackup(), + autonomous_db_backup.AutonomousDatabaseBackup(), + autonomous_db_backup.AutonomousDatabaseBackup(), + ], + next_page_token="abc", + ), + oracledatabase.ListAutonomousDatabaseBackupsResponse( + autonomous_database_backups=[], + next_page_token="def", + ), + oracledatabase.ListAutonomousDatabaseBackupsResponse( + autonomous_database_backups=[ + autonomous_db_backup.AutonomousDatabaseBackup(), + ], + next_page_token="ghi", + ), + oracledatabase.ListAutonomousDatabaseBackupsResponse( + autonomous_database_backups=[ + autonomous_db_backup.AutonomousDatabaseBackup(), + autonomous_db_backup.AutonomousDatabaseBackup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autonomous_database_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, autonomous_db_backup.AutonomousDatabaseBackup) + for i in results + ) + + pages = list( + client.list_autonomous_database_backups(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OracleDatabaseClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OracleDatabaseClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OracleDatabaseClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OracleDatabaseClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = OracleDatabaseClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OracleDatabaseRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = OracleDatabaseClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_oracle_database_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.OracleDatabaseTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_oracle_database_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OracleDatabaseTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
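+    # The tuple below enumerates every RPC plus the mixin location/operation
+    # methods exposed on the transport; each is called on the abstract base to
+    # confirm it raises.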
+ methods = ( + "list_cloud_exadata_infrastructures", + "get_cloud_exadata_infrastructure", + "create_cloud_exadata_infrastructure", + "delete_cloud_exadata_infrastructure", + "list_cloud_vm_clusters", + "get_cloud_vm_cluster", + "create_cloud_vm_cluster", + "delete_cloud_vm_cluster", + "list_entitlements", + "list_db_servers", + "list_db_nodes", + "list_gi_versions", + "list_db_system_shapes", + "list_autonomous_databases", + "get_autonomous_database", + "create_autonomous_database", + "delete_autonomous_database", + "restore_autonomous_database", + "generate_autonomous_database_wallet", + "list_autonomous_db_versions", + "list_autonomous_database_character_sets", + "list_autonomous_database_backups", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_oracle_database_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OracleDatabaseTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_oracle_database_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OracleDatabaseTransport() + adc.assert_called_once() + + +def test_oracle_database_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
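+    # google.auth.default() is patched so no real ADC lookup happens; the test
+    # only verifies the scopes and quota project forwarded to it.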
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + OracleDatabaseClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_oracle_database_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.OracleDatabaseRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_oracle_database_rest_lro_client(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_oracle_database_host_no_port(transport_name): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="oracledatabase.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_oracle_database_host_with_port(transport_name): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="oracledatabase.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "oracledatabase.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_oracle_database_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = OracleDatabaseClient( + credentials=creds1, + transport=transport_name, + ) + client2 = OracleDatabaseClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_cloud_exadata_infrastructures._session + session2 = client2.transport.list_cloud_exadata_infrastructures._session + assert session1 != session2 + session1 = client1.transport.get_cloud_exadata_infrastructure._session + session2 = client2.transport.get_cloud_exadata_infrastructure._session + assert session1 != session2 + session1 = client1.transport.create_cloud_exadata_infrastructure._session + session2 = client2.transport.create_cloud_exadata_infrastructure._session + assert session1 != session2 + session1 = client1.transport.delete_cloud_exadata_infrastructure._session + session2 = client2.transport.delete_cloud_exadata_infrastructure._session + assert session1 != session2 + session1 = client1.transport.list_cloud_vm_clusters._session + 
session2 = client2.transport.list_cloud_vm_clusters._session + assert session1 != session2 + session1 = client1.transport.get_cloud_vm_cluster._session + session2 = client2.transport.get_cloud_vm_cluster._session + assert session1 != session2 + session1 = client1.transport.create_cloud_vm_cluster._session + session2 = client2.transport.create_cloud_vm_cluster._session + assert session1 != session2 + session1 = client1.transport.delete_cloud_vm_cluster._session + session2 = client2.transport.delete_cloud_vm_cluster._session + assert session1 != session2 + session1 = client1.transport.list_entitlements._session + session2 = client2.transport.list_entitlements._session + assert session1 != session2 + session1 = client1.transport.list_db_servers._session + session2 = client2.transport.list_db_servers._session + assert session1 != session2 + session1 = client1.transport.list_db_nodes._session + session2 = client2.transport.list_db_nodes._session + assert session1 != session2 + session1 = client1.transport.list_gi_versions._session + session2 = client2.transport.list_gi_versions._session + assert session1 != session2 + session1 = client1.transport.list_db_system_shapes._session + session2 = client2.transport.list_db_system_shapes._session + assert session1 != session2 + session1 = client1.transport.list_autonomous_databases._session + session2 = client2.transport.list_autonomous_databases._session + assert session1 != session2 + session1 = client1.transport.get_autonomous_database._session + session2 = client2.transport.get_autonomous_database._session + assert session1 != session2 + session1 = client1.transport.create_autonomous_database._session + session2 = client2.transport.create_autonomous_database._session + assert session1 != session2 + session1 = client1.transport.delete_autonomous_database._session + session2 = client2.transport.delete_autonomous_database._session + assert session1 != session2 + session1 = client1.transport.restore_autonomous_database._session + session2 = client2.transport.restore_autonomous_database._session + assert session1 != session2 + session1 = client1.transport.generate_autonomous_database_wallet._session + session2 = client2.transport.generate_autonomous_database_wallet._session + assert session1 != session2 + session1 = client1.transport.list_autonomous_db_versions._session + session2 = client2.transport.list_autonomous_db_versions._session + assert session1 != session2 + session1 = client1.transport.list_autonomous_database_character_sets._session + session2 = client2.transport.list_autonomous_database_character_sets._session + assert session1 != session2 + session1 = client1.transport.list_autonomous_database_backups._session + session2 = client2.transport.list_autonomous_database_backups._session + assert session1 != session2 + + +def test_autonomous_database_path(): + project = "squid" + location = "clam" + autonomous_database = "whelk" + expected = "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format( + project=project, + location=location, + autonomous_database=autonomous_database, + ) + actual = OracleDatabaseClient.autonomous_database_path( + project, location, autonomous_database + ) + assert expected == actual + + +def test_parse_autonomous_database_path(): + expected = { + "project": "octopus", + "location": "oyster", + "autonomous_database": "nudibranch", + } + path = OracleDatabaseClient.autonomous_database_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OracleDatabaseClient.parse_autonomous_database_path(path) + assert expected == actual + + +def test_autonomous_database_backup_path(): + project = "cuttlefish" + location = "mussel" + autonomous_database_backup = "winkle" + expected = "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format( + project=project, + location=location, + autonomous_database_backup=autonomous_database_backup, + ) + actual = OracleDatabaseClient.autonomous_database_backup_path( + project, location, autonomous_database_backup + ) + assert expected == actual + + +def test_parse_autonomous_database_backup_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "autonomous_database_backup": "abalone", + } + path = OracleDatabaseClient.autonomous_database_backup_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_autonomous_database_backup_path(path) + assert expected == actual + + +def test_autonomous_database_character_set_path(): + project = "squid" + location = "clam" + autonomous_database_character_set = "whelk" + expected = "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format( + project=project, + location=location, + autonomous_database_character_set=autonomous_database_character_set, + ) + actual = OracleDatabaseClient.autonomous_database_character_set_path( + project, location, autonomous_database_character_set + ) + assert expected == actual + + +def test_parse_autonomous_database_character_set_path(): + expected = { + "project": "octopus", + "location": "oyster", + "autonomous_database_character_set": "nudibranch", + } + path = OracleDatabaseClient.autonomous_database_character_set_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_autonomous_database_character_set_path(path) + assert expected == actual + + +def test_autonomous_db_version_path(): + project = "cuttlefish" + location = "mussel" + autonomous_db_version = "winkle" + expected = "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format( + project=project, + location=location, + autonomous_db_version=autonomous_db_version, + ) + actual = OracleDatabaseClient.autonomous_db_version_path( + project, location, autonomous_db_version + ) + assert expected == actual + + +def test_parse_autonomous_db_version_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "autonomous_db_version": "abalone", + } + path = OracleDatabaseClient.autonomous_db_version_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OracleDatabaseClient.parse_autonomous_db_version_path(path) + assert expected == actual + + +def test_cloud_exadata_infrastructure_path(): + project = "squid" + location = "clam" + cloud_exadata_infrastructure = "whelk" + expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + ) + actual = OracleDatabaseClient.cloud_exadata_infrastructure_path( + project, location, cloud_exadata_infrastructure + ) + assert expected == actual + + +def test_parse_cloud_exadata_infrastructure_path(): + expected = { + "project": "octopus", + "location": "oyster", + "cloud_exadata_infrastructure": "nudibranch", + } + path = OracleDatabaseClient.cloud_exadata_infrastructure_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_cloud_exadata_infrastructure_path(path) + assert expected == actual + + +def test_cloud_vm_cluster_path(): + project = "cuttlefish" + location = "mussel" + cloud_vm_cluster = "winkle" + expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + ) + actual = OracleDatabaseClient.cloud_vm_cluster_path( + project, location, cloud_vm_cluster + ) + assert expected == actual + + +def test_parse_cloud_vm_cluster_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "cloud_vm_cluster": "abalone", + } + path = OracleDatabaseClient.cloud_vm_cluster_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_cloud_vm_cluster_path(path) + assert expected == actual + + +def test_db_node_path(): + project = "squid" + location = "clam" + cloud_vm_cluster = "whelk" + db_node = "octopus" + expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + db_node=db_node, + ) + actual = OracleDatabaseClient.db_node_path( + project, location, cloud_vm_cluster, db_node + ) + assert expected == actual + + +def test_parse_db_node_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "cloud_vm_cluster": "cuttlefish", + "db_node": "mussel", + } + path = OracleDatabaseClient.db_node_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_db_node_path(path) + assert expected == actual + + +def test_db_server_path(): + project = "winkle" + location = "nautilus" + cloud_exadata_infrastructure = "scallop" + db_server = "abalone" + expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + db_server=db_server, + ) + actual = OracleDatabaseClient.db_server_path( + project, location, cloud_exadata_infrastructure, db_server + ) + assert expected == actual + + +def test_parse_db_server_path(): + expected = { + "project": "squid", + "location": "clam", + "cloud_exadata_infrastructure": "whelk", + "db_server": "octopus", + } + path = OracleDatabaseClient.db_server_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OracleDatabaseClient.parse_db_server_path(path) + assert expected == actual + + +def test_db_system_shape_path(): + project = "oyster" + location = "nudibranch" + db_system_shape = "cuttlefish" + expected = "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format( + project=project, + location=location, + db_system_shape=db_system_shape, + ) + actual = OracleDatabaseClient.db_system_shape_path( + project, location, db_system_shape + ) + assert expected == actual + + +def test_parse_db_system_shape_path(): + expected = { + "project": "mussel", + "location": "winkle", + "db_system_shape": "nautilus", + } + path = OracleDatabaseClient.db_system_shape_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_db_system_shape_path(path) + assert expected == actual + + +def test_entitlement_path(): + project = "scallop" + location = "abalone" + entitlement = "squid" + expected = ( + "projects/{project}/locations/{location}/entitlements/{entitlement}".format( + project=project, + location=location, + entitlement=entitlement, + ) + ) + actual = OracleDatabaseClient.entitlement_path(project, location, entitlement) + assert expected == actual + + +def test_parse_entitlement_path(): + expected = { + "project": "clam", + "location": "whelk", + "entitlement": "octopus", + } + path = OracleDatabaseClient.entitlement_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_entitlement_path(path) + assert expected == actual + + +def test_gi_version_path(): + project = "oyster" + location = "nudibranch" + gi_version = "cuttlefish" + expected = "projects/{project}/locations/{location}/giVersions/{gi_version}".format( + project=project, + location=location, + gi_version=gi_version, + ) + actual = OracleDatabaseClient.gi_version_path(project, location, gi_version) + assert expected == actual + + +def test_parse_gi_version_path(): + expected = { + "project": "mussel", + "location": "winkle", + "gi_version": "nautilus", + } + path = OracleDatabaseClient.gi_version_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_gi_version_path(path) + assert expected == actual + + +def test_network_path(): + project = "scallop" + network = "abalone" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = OracleDatabaseClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "squid", + "network": "clam", + } + path = OracleDatabaseClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_network_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = OracleDatabaseClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = OracleDatabaseClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OracleDatabaseClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = OracleDatabaseClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = OracleDatabaseClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = OracleDatabaseClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = OracleDatabaseClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = OracleDatabaseClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = OracleDatabaseClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = OracleDatabaseClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = OracleDatabaseClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = OracleDatabaseClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.OracleDatabaseTransport, "_prep_wrapped_messages" + ) as prep: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.OracleDatabaseTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = OracleDatabaseClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 0d350038411bbdcf10eb7fb6820084abcb362c5a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 13:17:47 -0400 Subject: [PATCH 085/108] feat: [google-cloud-dataproc] add support for Spark Connect sessions in Dataproc Serverless for Spark (#13106) BEGIN_COMMIT_OVERRIDE feat: add support for Spark Connect sessions in Dataproc Serverless for Spark docs: update docs for `filter` field in `ListSessionsRequest` END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
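For illustration only, a minimal sketch (not part of this change) of how the new `spark_connect_session` field and the expanded `labels` filter support might be used from `dataproc_v1`; the project, region, and session names below are placeholders:

    from google.cloud import dataproc_v1

    # Dataproc session APIs use a regional endpoint.
    client = dataproc_v1.SessionControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )

    # Create a Spark Connect session via the new `spark_connect_session` oneof member.
    session = dataproc_v1.Session(
        name="projects/my-project/locations/us-central1/sessions/my-session",
        spark_connect_session=dataproc_v1.SparkConnectConfig(),
    )
    operation = client.create_session(
        parent="projects/my-project/locations/us-central1",
        session=session,
        session_id="my-session",
    )
    operation.result()  # create_session is a long-running operation

    # List ACTIVE sessions using the newly documented `labels` filter field.
    for s in client.list_sessions(
        request=dataproc_v1.ListSessionsRequest(
            parent="projects/my-project/locations/us-central1",
            filter='state = ACTIVE and labels.environment=production',
        )
    ):
        print(s.name)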
docs: update docs for `filter` field in `ListSessionsRequest` PiperOrigin-RevId: 678438691 Source-Link: https://github.com/googleapis/googleapis/commit/5c181aaf78bd1ae2e08c3a2971cd9e87b6e00986 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3b37678e3ed4e52f2a122ea91738a9e7b5f4cba1 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiIzYjM3Njc4ZTNlZDRlNTJmMmExMjJlYTkxNzM4YTllN2I1ZjRjYmExIn0= --------- Co-authored-by: Owl Bot --- .../google/cloud/dataproc/__init__.py | 2 ++ .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/__init__.py | 2 ++ .../google/cloud/dataproc_v1/gapic_version.py | 2 +- .../cloud/dataproc_v1/types/__init__.py | 2 ++ .../dataproc_v1/types/session_templates.py | 15 +++++++++++ .../cloud/dataproc_v1/types/sessions.py | 27 +++++++++++++++++-- ...pet_metadata_google.cloud.dataproc.v1.json | 2 +- .../dataproc_v1/test_session_controller.py | 1 + .../test_session_template_controller.py | 2 ++ 10 files changed, 52 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py index 1c45dca78fda..5df4195f42c4 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py @@ -189,6 +189,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from google.cloud.dataproc_v1.types.shared import ( @@ -362,6 +363,7 @@ "ListSessionsRequest", "ListSessionsResponse", "Session", + "SparkConnectConfig", "TerminateSessionRequest", "AutotuningConfig", "EnvironmentConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 435e79ea7a30..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.12.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py index e89772784679..1a6bbd78319e 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py @@ -169,6 +169,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from .types.shared import ( @@ -353,6 +354,7 @@ "ShieldedInstanceConfig", "SoftwareConfig", "SparkBatch", + "SparkConnectConfig", "SparkHistoryServerConfig", "SparkJob", "SparkRBatch", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 435e79ea7a30..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.12.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py index 2bf4fcd11209..62dce7408efa 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py @@ -138,6 +138,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from .shared import ( @@ -295,6 +296,7 @@ "ListSessionsRequest", "ListSessionsResponse", "Session", + "SparkConnectConfig", "TerminateSessionRequest", "AutotuningConfig", "EnvironmentConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py index 60c792b58b5b..66125b2cb461 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py @@ -175,6 +175,11 @@ class DeleteSessionTemplateRequest(proto.Message): class SessionTemplate(proto.Message): r"""A representation of a session template. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -189,6 +194,10 @@ class SessionTemplate(proto.Message): jupyter_session (google.cloud.dataproc_v1.types.JupyterConfig): Optional. Jupyter session config. + This field is a member of `oneof`_ ``session_config``. + spark_connect_session (google.cloud.dataproc_v1.types.SparkConnectConfig): + Optional. Spark Connect session config. + This field is a member of `oneof`_ ``session_config``. creator (str): Output only. The email address of the user @@ -236,6 +245,12 @@ class SessionTemplate(proto.Message): oneof="session_config", message=sessions.JupyterConfig, ) + spark_connect_session: sessions.SparkConnectConfig = proto.Field( + proto.MESSAGE, + number=11, + oneof="session_config", + message=sessions.SparkConnectConfig, + ) creator: str = proto.Field( proto.STRING, number=5, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py index 2a5b2cc5d248..1ab37c9ec32c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py @@ -33,6 +33,7 @@ "DeleteSessionRequest", "Session", "JupyterConfig", + "SparkConnectConfig", }, ) @@ -125,13 +126,16 @@ class ListSessionsRequest(proto.Message): various fields in each session resource. Filters are case sensitive, and may contain multiple clauses combined with logical operators (AND, OR). Supported fields are - ``session_id``, ``session_uuid``, ``state``, and - ``create_time``. + ``session_id``, ``session_uuid``, ``state``, + ``create_time``, and ``labels``. Example: ``state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`` is a filter for sessions in an ACTIVE state that were created before 2023-01-01. 
+ ``state = ACTIVE and labels.environment=production`` is a + filter for sessions in an ACTIVE state that have a + production environment label. See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed description of the filter syntax and a list @@ -249,6 +253,11 @@ class DeleteSessionRequest(proto.Message): class Session(proto.Message): r"""A representation of a session. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -264,6 +273,10 @@ class Session(proto.Message): jupyter_session (google.cloud.dataproc_v1.types.JupyterConfig): Optional. Jupyter session config. + This field is a member of `oneof`_ ``session_config``. + spark_connect_session (google.cloud.dataproc_v1.types.SparkConnectConfig): + Optional. Spark Connect session config. + This field is a member of `oneof`_ ``session_config``. runtime_info (google.cloud.dataproc_v1.types.RuntimeInfo): Output only. Runtime information about @@ -388,6 +401,12 @@ class SessionStateHistory(proto.Message): oneof="session_config", message="JupyterConfig", ) + spark_connect_session: "SparkConnectConfig" = proto.Field( + proto.MESSAGE, + number=17, + oneof="session_config", + message="SparkConnectConfig", + ) runtime_info: shared.RuntimeInfo = proto.Field( proto.MESSAGE, number=6, @@ -478,4 +497,8 @@ class Kernel(proto.Enum): ) +class SparkConnectConfig(proto.Message): + r"""Spark Connect configuration for an interactive session.""" + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index a44d5d6db9b3..c5f4e003db04 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.12.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py index f5dbfb565af8..7cc868611e6a 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py @@ -3293,6 +3293,7 @@ def test_create_session_rest(request_type): "uuid": "uuid_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "runtime_info": { "endpoints": {}, "output_uri": "output_uri_value", diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py index b157306093fc..0da9f81f5ffe 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py @@ -3412,6 
+3412,7 @@ def test_create_session_template_rest(request_type): "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "creator": "creator_value", "labels": {}, "runtime_config": { @@ -3850,6 +3851,7 @@ def test_update_session_template_rest(request_type): "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "creator": "creator_value", "labels": {}, "runtime_config": { From 89e859b4741d8d4eca7065eb095a9c3ce873d733 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Mon, 30 Sep 2024 11:22:38 -0700 Subject: [PATCH 086/108] chore: Update release-please config files (#13103) Update release-please config files --- .release-please-manifest.json | 1 + release-please-config.json | 15 +++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 9b7f01b6663b..37cf389b81dc 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -117,6 +117,7 @@ "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", + "packages/google-cloud-oracledatabase": "0.0.0", "packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", diff --git a/release-please-config.json b/release-please-config.json index ca70cf6baec5..7866aa34f750 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -2028,6 +2028,21 @@ ], "release-type": "python" }, + "packages/google-cloud-oracledatabase": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-oracledatabase", + "extra-files": [ + "google/cloud/oracledatabase/gapic_version.py", + "google/cloud/oracledatabase_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-cloud-orchestration-airflow": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, From 32b254c110626aff2194aceb93f131f745cfcf29 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 14:43:55 -0400 Subject: [PATCH 087/108] feat: [google-maps-routeoptimization] Add support for generating route tokens (#13105) BEGIN_COMMIT_OVERRIDE feat: Add support for generating route tokens feat: A new field `route_token` is added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
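For illustration only, a minimal sketch (not part of this change) of how the new `Transition.route_token` might be read; it is only populated when `populate_transition_polylines` is set, and the project name and (empty) model below are placeholders:

    from google.maps import routeoptimization_v1

    client = routeoptimization_v1.RouteOptimizationClient()

    request = routeoptimization_v1.OptimizeToursRequest(
        parent="projects/my-project",
        model=routeoptimization_v1.ShipmentModel(),  # shipments/vehicles omitted here
        populate_transition_polylines=True,  # also populates Transition.route_token
    )
    response = client.optimize_tours(request=request)

    for route in response.routes:
        for transition in route.transitions:
            # Opaque token; pass it to the Navigation SDK rather than inspecting it.
            print(transition.route_token)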
feat: A new field `route_token` is added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` docs: A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed docs: A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed docs: A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed PiperOrigin-RevId: 677952232 Source-Link: https://github.com/googleapis/googleapis/commit/534e49c0ca0b9297f4ede6f119a0db054b35dd1e Source-Link: https://github.com/googleapis/googleapis-gen/commit/da6e35d31b0de9ddbaa97bd964899fbb9b1c000c Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLW1hcHMtcm91dGVvcHRpbWl6YXRpb24vLk93bEJvdC55YW1sIiwiaCI6ImRhNmUzNWQzMWIwZGU5ZGRiYWE5N2JkOTY0ODk5ZmJiOWIxYzAwMGMifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../maps/routeoptimization/gapic_version.py | 2 +- .../routeoptimization_v1/gapic_version.py | 2 +- .../route_optimization/async_client.py | 21 +- .../services/route_optimization/client.py | 21 +- .../route_optimization/transports/grpc.py | 21 +- .../transports/grpc_asyncio.py | 21 +- .../types/route_optimization_service.py | 372 ++---------------- ...data_google.maps.routeoptimization.v1.json | 2 +- 8 files changed, 70 insertions(+), 392 deletions(-) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 114e40645800..558c8aab67c5 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py index 3c899f1f772a..73dc5e7358cc 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/async_client.py @@ -403,25 +403,26 @@ async def batch_optimize_tours( This method is a Long Running Operation (LRO). The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. 
Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. + If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. .. code-block:: python diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py index c88ee1b4892f..93b71fd86ac4 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/client.py @@ -805,25 +805,26 @@ def batch_optimize_tours( This method is a Long Running Operation (LRO). The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. + If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. 
If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. .. code-block:: python diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py index 8c5621a5f0ba..79f73aece6fa 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc.py @@ -329,25 +329,26 @@ def batch_optimize_tours( This method is a Long Running Operation (LRO). The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. + If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. Returns: Callable[[~.BatchOptimizeToursRequest], diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py index edf1e396fe6f..285c3188cd2e 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/services/route_optimization/transports/grpc_asyncio.py @@ -336,25 +336,26 @@ def batch_optimize_tours( This method is a Long Running Operation (LRO). 
The inputs for optimization (``OptimizeToursRequest`` messages) and outputs - (``OptimizeToursResponse`` messages) are read/written from/to - Cloud Storage in user-specified format. Like the + (``OptimizeToursResponse`` messages) are read from and written + to Cloud Storage in user-specified format. Like the ``OptimizeTours`` method, each ``OptimizeToursRequest`` contains a ``ShipmentModel`` and returns an ``OptimizeToursResponse`` - containing ``ShipmentRoute``\ s, which are a set of routes to be - performed by vehicles minimizing the overall cost. + containing ``ShipmentRoute`` fields, which are a set of routes + to be performed by vehicles minimizing the overall cost. The user can poll ``operations.get`` to check the status of the LRO: - If the LRO's ``done`` field is false, then at least one request - is still being processed. Other requests may have completed - successfully and their results are available in GCS. + If the LRO ``done`` field is false, then at least one request is + still being processed. Other requests may have completed + successfully and their results are available in Cloud Storage. If the LRO's ``done`` field is true, then all requests have been processed. Any successfully processed requests will have their - results available in GCS. Any requests that failed will not have - their results available in GCS. If the LRO's ``error`` field is - set, then it contains the error from one of the failed requests. + results available in Cloud Storage. Any requests that failed + will not have their results available in Cloud Storage. If the + LRO's ``error`` field is set, then it contains the error from + one of the failed requests. Returns: Callable[[~.BatchOptimizeToursRequest], diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py index 0dd1de2b1f9d..6a4e3811952e 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/types/route_optimization_service.py @@ -338,7 +338,8 @@ class OptimizeToursRequest(proto.Message): If true, polylines will be populated in response ``ShipmentRoute``\ s. populate_transition_polylines (bool): - If true, polylines will be populated in response + If true, polylines and route tokens will be populated in + response [ShipmentRoute.transitions][google.maps.routeoptimization.v1.ShipmentRoute.transitions]. allow_large_deadline_despite_interruption_risk (bool): If this is set, then the request can have a @@ -2937,6 +2938,18 @@ class Transition(proto.Message): [populate_transition_polylines] [google.maps.routeoptimization.v1.OptimizeToursRequest.populate_transition_polylines] is set to true. + route_token (str): + Output only. An opaque token that can be passed to + `Navigation + SDK `__ + to reconstruct the route during navigation, and, in the + event of rerouting, honor the original intention when the + route was created. Treat this token as an opaque blob. Don't + compare its value across requests as its value may change + even if the service returns the exact same route. This field + is only populated if [populate_transition_polylines] + [google.maps.routeoptimization.v1.OptimizeToursRequest.populate_transition_polylines] + is set to true. 
vehicle_loads (MutableMapping[str, google.maps.routeoptimization_v1.types.ShipmentRoute.VehicleLoad]): Vehicle loads during this transition, for each type that either appears in this vehicle's @@ -2995,6 +3008,10 @@ class Transition(proto.Message): number=9, message="ShipmentRoute.EncodedPolyline", ) + route_token: str = proto.Field( + proto.STRING, + number=12, + ) vehicle_loads: MutableMapping[ str, "ShipmentRoute.VehicleLoad" ] = proto.MapField( @@ -3562,7 +3579,8 @@ class OptimizeToursValidationError(proto.Message): A validation error is defined by the pair (``code``, ``display_name``) which are always present. - Other fields (below) provide more context about the error. + The fields following this section provide more context about + the error. *MULTIPLE ERRORS*: When there are multiple errors, the validation process tries to output several of them. Much @@ -3570,358 +3588,14 @@ class OptimizeToursValidationError(proto.Message): validation errors will be "fatal", meaning that they stop the entire validation process. This is the case for ``display_name="UNSPECIFIED"`` errors, among others. Some - may cause the validation process to skip other errors. + errors may cause the validation process to skip other + errors. *STABILITY*: ``code`` and ``display_name`` should be very stable. But new codes and display names may appear over time, which may cause a given (invalid) request to yield a different (``code``, ``display_name``) pair because the new - error hid the old one (see "MULTIPLE ERRORS"). - - *REFERENCE*: A list of all (code, name) pairs: - - - UNSPECIFIED = 0; - - - VALIDATION_TIMEOUT_ERROR = 10; Validation couldn't be - completed within the deadline. - - - REQUEST_OPTIONS_ERROR = 12; - - - REQUEST_OPTIONS_INVALID_SOLVING_MODE = 1201; - - REQUEST_OPTIONS_INVALID_MAX_VALIDATION_ERRORS = 1203; - - REQUEST_OPTIONS_INVALID_GEODESIC_METERS_PER_SECOND = - 1204; - - REQUEST_OPTIONS_GEODESIC_METERS_PER_SECOND_TOO_SMALL = - 1205; - - REQUEST_OPTIONS_MISSING_GEODESIC_METERS_PER_SECOND = - 1206; - - REQUEST_OPTIONS_POPULATE_PATHFINDER_TRIPS_AND_GEODESIC_DISTANCE - = 1207; - - REQUEST_OPTIONS_COST_MODEL_OPTIONS_AND_GEODESIC_DISTANCE - = 1208; - - REQUEST_OPTIONS_TRAVEL_MODE_INCOMPATIBLE_WITH_TRAFFIC - = 1211; - - REQUEST_OPTIONS_MULTIPLE_TRAFFIC_FLAVORS = 1212; - - REQUEST_OPTIONS_INVALID_TRAFFIC_FLAVOR = 1213; - - REQUEST_OPTIONS_TRAFFIC_ENABLED_WITHOUT_GLOBAL_START_TIME - = 1214; - - REQUEST_OPTIONS_TRAFFIC_ENABLED_WITH_PRECEDENCES = - 1215; - - REQUEST_OPTIONS_TRAFFIC_PREFILL_MODE_INVALID = 1216; - - REQUEST_OPTIONS_TRAFFIC_PREFILL_ENABLED_WITHOUT_TRAFFIC - = 1217; - - - INJECTED_SOLUTION_ERROR = 20; - - - INJECTED_SOLUTION_MISSING_LABEL = 2000; - - INJECTED_SOLUTION_DUPLICATE_LABEL = 2001; - - INJECTED_SOLUTION_AMBIGUOUS_INDEX = 2002; - - INJECTED_SOLUTION_INFEASIBLE_AFTER_GETTING_TRAVEL_TIMES - = 2003; - - INJECTED_SOLUTION_TRANSITION_INCONSISTENT_WITH_ACTUAL_TRAVEL - = 2004; - - INJECTED_SOLUTION_CONCURRENT_SOLUTION_TYPES = 2005; - - INJECTED_SOLUTION_MORE_THAN_ONE_PER_TYPE = 2006; - - INJECTED_SOLUTION_REFRESH_WITHOUT_POPULATE = 2008; - - INJECTED_SOLUTION_CONSTRAINED_ROUTE_PORTION_INFEASIBLE - = 2010; - - - SHIPMENT_MODEL_ERROR = 22; - - - SHIPMENT_MODEL_TOO_LARGE = 2200; - - SHIPMENT_MODEL_TOO_MANY_CAPACITY_TYPES = 2201; - - SHIPMENT_MODEL_GLOBAL_START_TIME_NEGATIVE_OR_NAN = - 2202; - - SHIPMENT_MODEL_GLOBAL_END_TIME_TOO_LARGE_OR_NAN = - 2203; - - SHIPMENT_MODEL_GLOBAL_START_TIME_AFTER_GLOBAL_END_TIME - = 2204; - - SHIPMENT_MODEL_GLOBAL_DURATION_TOO_LONG = 2205; - - 
SHIPMENT_MODEL_MAX_ACTIVE_VEHICLES_NOT_POSITIVE = - 2206; - - SHIPMENT_MODEL_DURATION_MATRIX_TOO_LARGE = 2207; - - - INDEX_ERROR = 24; - - - TAG_ERROR = 26; - - - TIME_WINDOW_ERROR = 28; - - - TIME_WINDOW_INVALID_START_TIME = 2800; - - TIME_WINDOW_INVALID_END_TIME = 2801; - - TIME_WINDOW_INVALID_SOFT_START_TIME = 2802; - - TIME_WINDOW_INVALID_SOFT_END_TIME = 2803; - - TIME_WINDOW_OUTSIDE_GLOBAL_TIME_WINDOW = 2804; - - TIME_WINDOW_START_TIME_AFTER_END_TIME = 2805; - - TIME_WINDOW_INVALID_COST_PER_HOUR_BEFORE_SOFT_START_TIME - = 2806; - - TIME_WINDOW_INVALID_COST_PER_HOUR_AFTER_SOFT_END_TIME - = 2807; - - TIME_WINDOW_COST_BEFORE_SOFT_START_TIME_WITHOUT_SOFT_START_TIME - = 2808; - - TIME_WINDOW_COST_AFTER_SOFT_END_TIME_WITHOUT_SOFT_END_TIME - = 2809; - - TIME_WINDOW_SOFT_START_TIME_WITHOUT_COST_BEFORE_SOFT_START_TIME - = 2810; - - TIME_WINDOW_SOFT_END_TIME_WITHOUT_COST_AFTER_SOFT_END_TIME - = 2811; - - TIME_WINDOW_OVERLAPPING_ADJACENT_OR_EARLIER_THAN_PREVIOUS - = 2812; - - TIME_WINDOW_START_TIME_AFTER_SOFT_START_TIME = 2813; - - TIME_WINDOW_SOFT_START_TIME_OUTSIDE_GLOBAL_TIME_WINDOW - = 2819; - - TIME_WINDOW_SOFT_END_TIME_OUTSIDE_GLOBAL_TIME_WINDOW = - 2820; - - TIME_WINDOW_SOFT_END_TIME_AFTER_END_TIME = 2816; - - TIME_WINDOW_COST_BEFORE_SOFT_START_TIME_SET_AND_MULTIPLE_WINDOWS - = 2817; - - TIME_WINDOW_COST_AFTER_SOFT_END_TIME_SET_AND_MULTIPLE_WINDOWS - = 2818; - - TRANSITION_ATTRIBUTES_ERROR = 30; - - TRANSITION_ATTRIBUTES_INVALID_COST = 3000; - - TRANSITION_ATTRIBUTES_INVALID_COST_PER_KILOMETER = - 3001; - - TRANSITION_ATTRIBUTES_DUPLICATE_TAG_PAIR = 3002; - - TRANSITION_ATTRIBUTES_DISTANCE_LIMIT_MAX_METERS_UNSUPPORTED - = 3003; - - TRANSITION_ATTRIBUTES_UNSPECIFIED_SOURCE_TAGS = 3004; - - TRANSITION_ATTRIBUTES_CONFLICTING_SOURCE_TAGS_FIELDS = - 3005; - - TRANSITION_ATTRIBUTES_UNSPECIFIED_DESTINATION_TAGS = - 3006; - - TRANSITION_ATTRIBUTES_CONFLICTING_DESTINATION_TAGS_FIELDS - = 3007; - - TRANSITION_ATTRIBUTES_DELAY_DURATION_NEGATIVE_OR_NAN = - 3008; - - TRANSITION_ATTRIBUTES_DELAY_DURATION_EXCEEDS_GLOBAL_DURATION - = 3009; - - - AMOUNT_ERROR = 31; - - - AMOUNT_NEGATIVE_VALUE = 3100; - - - LOAD_LIMIT_ERROR = 33; - - - LOAD_LIMIT_INVALID_COST_ABOVE_SOFT_MAX = 3303; - - LOAD_LIMIT_SOFT_MAX_WITHOUT_COST_ABOVE_SOFT_MAX = - 3304; - - LOAD_LIMIT_COST_ABOVE_SOFT_MAX_WITHOUT_SOFT_MAX = - 3305; - - LOAD_LIMIT_NEGATIVE_SOFT_MAX = 3306; - - LOAD_LIMIT_MIXED_DEMAND_TYPE = 3307; - - LOAD_LIMIT_MAX_LOAD_NEGATIVE_VALUE = 3308; - - LOAD_LIMIT_SOFT_MAX_ABOVE_MAX = 3309; - - - INTERVAL_ERROR = 34; - - - INTERVAL_MIN_EXCEEDS_MAX = 3401; - - INTERVAL_NEGATIVE_MIN = 3402; - - INTERVAL_NEGATIVE_MAX = 3403; - - INTERVAL_MIN_EXCEEDS_CAPACITY = 3404; - - INTERVAL_MAX_EXCEEDS_CAPACITY = 3405; - - - DISTANCE_LIMIT_ERROR = 36; - - - DISTANCE_LIMIT_INVALID_COST_AFTER_SOFT_MAX = 3601; - - DISTANCE_LIMIT_SOFT_MAX_WITHOUT_COST_AFTER_SOFT_MAX = - 3602; - - DISTANCE_LIMIT_COST_AFTER_SOFT_MAX_WITHOUT_SOFT_MAX = - 3603; - - DISTANCE_LIMIT_NEGATIVE_MAX = 3604; - - DISTANCE_LIMIT_NEGATIVE_SOFT_MAX = 3605; - - DISTANCE_LIMIT_SOFT_MAX_LARGER_THAN_MAX = 3606; - - - DURATION_LIMIT_ERROR = 38; - - - DURATION_LIMIT_MAX_DURATION_NEGATIVE_OR_NAN = 3800; - - DURATION_LIMIT_SOFT_MAX_DURATION_NEGATIVE_OR_NAN = - 3801; - - DURATION_LIMIT_INVALID_COST_PER_HOUR_AFTER_SOFT_MAX = - 3802; - - DURATION_LIMIT_SOFT_MAX_WITHOUT_COST_AFTER_SOFT_MAX = - 3803; - - DURATION_LIMIT_COST_AFTER_SOFT_MAX_WITHOUT_SOFT_MAX = - 3804; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_DURATION_NEGATIVE_OR_NAN - = 3805; - - 
DURATION_LIMIT_INVALID_COST_AFTER_QUADRATIC_SOFT_MAX = - 3806; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_WITHOUT_COST_PER_SQUARE_HOUR - = 3807; - - DURATION_LIMIT_COST_PER_SQUARE_HOUR_WITHOUT_QUADRATIC_SOFT_MAX - = 3808; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_WITHOUT_MAX = 3809; - - DURATION_LIMIT_SOFT_MAX_LARGER_THAN_MAX = 3810; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_LARGER_THAN_MAX = - 3811; - - DURATION_LIMIT_DIFF_BETWEEN_MAX_AND_QUADRATIC_SOFT_MAX_TOO_LARGE - = 3812; - - DURATION_LIMIT_MAX_DURATION_EXCEEDS_GLOBAL_DURATION = - 3813; - - DURATION_LIMIT_SOFT_MAX_DURATION_EXCEEDS_GLOBAL_DURATION - = 3814; - - DURATION_LIMIT_QUADRATIC_SOFT_MAX_DURATION_EXCEEDS_GLOBAL_DURATION - = 3815; - - - SHIPMENT_ERROR = 40; - - - SHIPMENT_PD_LIMIT_WITHOUT_PICKUP_AND_DELIVERY = 4014; - - SHIPMENT_PD_ABSOLUTE_DETOUR_LIMIT_DURATION_NEGATIVE_OR_NAN - = 4000; - - SHIPMENT_PD_ABSOLUTE_DETOUR_LIMIT_DURATION_EXCEEDS_GLOBAL_DURATION - = 4001; - - SHIPMENT_PD_RELATIVE_DETOUR_LIMIT_INVALID = 4015; - - SHIPMENT_PD_DETOUR_LIMIT_AND_EXTRA_VISIT_DURATION = - 4016; - - SHIPMENT_PD_TIME_LIMIT_DURATION_NEGATIVE_OR_NAN = - 4002; - - SHIPMENT_PD_TIME_LIMIT_DURATION_EXCEEDS_GLOBAL_DURATION - = 4003; - - SHIPMENT_EMPTY_SHIPMENT_TYPE = 4004; - - SHIPMENT_NO_PICKUP_NO_DELIVERY = 4005; - - SHIPMENT_INVALID_PENALTY_COST = 4006; - - SHIPMENT_ALLOWED_VEHICLE_INDEX_OUT_OF_BOUNDS = 4007; - - SHIPMENT_DUPLICATE_ALLOWED_VEHICLE_INDEX = 4008; - - SHIPMENT_INCONSISTENT_COST_FOR_VEHICLE_SIZE_WITHOUT_INDEX - = 4009; - - SHIPMENT_INCONSISTENT_COST_FOR_VEHICLE_SIZE_WITH_INDEX - = 4010; - - SHIPMENT_INVALID_COST_FOR_VEHICLE = 4011; - - SHIPMENT_COST_FOR_VEHICLE_INDEX_OUT_OF_BOUNDS = 4012; - - SHIPMENT_DUPLICATE_COST_FOR_VEHICLE_INDEX = 4013; - - - VEHICLE_ERROR = 42; - - - VEHICLE_EMPTY_REQUIRED_OPERATOR_TYPE = 4200; - - VEHICLE_DUPLICATE_REQUIRED_OPERATOR_TYPE = 4201; - - VEHICLE_NO_OPERATOR_WITH_REQUIRED_OPERATOR_TYPE = - 4202; - - VEHICLE_EMPTY_START_TAG = 4203; - - VEHICLE_DUPLICATE_START_TAG = 4204; - - VEHICLE_EMPTY_END_TAG = 4205; - - VEHICLE_DUPLICATE_END_TAG = 4206; - - VEHICLE_EXTRA_VISIT_DURATION_NEGATIVE_OR_NAN = 4207; - - VEHICLE_EXTRA_VISIT_DURATION_EXCEEDS_GLOBAL_DURATION = - 4208; - - VEHICLE_EXTRA_VISIT_DURATION_EMPTY_KEY = 4209; - - VEHICLE_FIRST_SHIPMENT_INDEX_OUT_OF_BOUNDS = 4210; - - VEHICLE_FIRST_SHIPMENT_IGNORED = 4211; - - VEHICLE_FIRST_SHIPMENT_NOT_BOUND = 4212; - - VEHICLE_LAST_SHIPMENT_INDEX_OUT_OF_BOUNDS = 4213; - - VEHICLE_LAST_SHIPMENT_IGNORED = 4214; - - VEHICLE_LAST_SHIPMENT_NOT_BOUND = 4215; - - VEHICLE_IGNORED_WITH_USED_IF_ROUTE_IS_EMPTY = 4216; - - VEHICLE_INVALID_COST_PER_KILOMETER = 4217; - - VEHICLE_INVALID_COST_PER_HOUR = 4218; - - VEHICLE_INVALID_COST_PER_TRAVELED_HOUR = 4219; - - VEHICLE_INVALID_FIXED_COST = 4220; - - VEHICLE_INVALID_TRAVEL_DURATION_MULTIPLE = 4221; - - VEHICLE_TRAVEL_DURATION_MULTIPLE_WITH_SHIPMENT_PD_DETOUR_LIMITS - = 4223; - - VEHICLE_MATRIX_INDEX_WITH_SHIPMENT_PD_DETOUR_LIMITS = - 4224; - - VEHICLE_MINIMUM_DURATION_LONGER_THAN_DURATION_LIMIT = - 4222; - - - VISIT_REQUEST_ERROR = 44; - - - VISIT_REQUEST_EMPTY_TAG = 4400; - - VISIT_REQUEST_DUPLICATE_TAG = 4401; - - VISIT_REQUEST_DURATION_NEGATIVE_OR_NAN = 4404; - - VISIT_REQUEST_DURATION_EXCEEDS_GLOBAL_DURATION = 4405; - - - PRECEDENCE_ERROR = 46; - - - PRECEDENCE_RULE_MISSING_FIRST_INDEX = 4600; - - PRECEDENCE_RULE_MISSING_SECOND_INDEX = 4601; - - PRECEDENCE_RULE_FIRST_INDEX_OUT_OF_BOUNDS = 4602; - - PRECEDENCE_RULE_SECOND_INDEX_OUT_OF_BOUNDS = 4603; - - PRECEDENCE_RULE_DUPLICATE_INDEX = 4604; - - 
PRECEDENCE_RULE_INEXISTENT_FIRST_VISIT_REQUEST = 4605; - - PRECEDENCE_RULE_INEXISTENT_SECOND_VISIT_REQUEST = - 4606; - - - BREAK_ERROR = 48; - - - BREAK_RULE_EMPTY = 4800; - - BREAK_REQUEST_UNSPECIFIED_DURATION = 4801; - - BREAK_REQUEST_UNSPECIFIED_EARLIEST_START_TIME = 4802; - - BREAK_REQUEST_UNSPECIFIED_LATEST_START_TIME = 4803; - - BREAK_REQUEST_DURATION_NEGATIVE_OR_NAN = 4804; = 4804; - - BREAK_REQUEST_LATEST_START_TIME_BEFORE_EARLIEST_START_TIME - = 4805; - - BREAK_REQUEST_EARLIEST_START_TIME_BEFORE_GLOBAL_START_TIME - = 4806; - - BREAK_REQUEST_LATEST_END_TIME_AFTER_GLOBAL_END_TIME = - 4807; - - BREAK_REQUEST_NON_SCHEDULABLE = 4808; - - BREAK_FREQUENCY_MAX_INTER_BREAK_DURATION_NEGATIVE_OR_NAN - = 4809; - - BREAK_FREQUENCY_MIN_BREAK_DURATION_NEGATIVE_OR_NAN = - 4810; - - BREAK_FREQUENCY_MIN_BREAK_DURATION_EXCEEDS_GLOBAL_DURATION - = 4811; - - BREAK_FREQUENCY_MAX_INTER_BREAK_DURATION_EXCEEDS_GLOBAL_DURATION - = 4812; - - BREAK_REQUEST_DURATION_EXCEEDS_GLOBAL_DURATION = 4813; - - BREAK_FREQUENCY_MISSING_MAX_INTER_BREAK_DURATION = - 4814; - - BREAK_FREQUENCY_MISSING_MIN_BREAK_DURATION = 4815; - - - SHIPMENT_TYPE_INCOMPATIBILITY_ERROR = 50; - - - SHIPMENT_TYPE_INCOMPATIBILITY_EMPTY_TYPE = 5001; - - SHIPMENT_TYPE_INCOMPATIBILITY_LESS_THAN_TWO_TYPES = - 5002; - - SHIPMENT_TYPE_INCOMPATIBILITY_DUPLICATE_TYPE = 5003; - - SHIPMENT_TYPE_INCOMPATIBILITY_INVALID_INCOMPATIBILITY_MODE - = 5004; - - SHIPMENT_TYPE_INCOMPATIBILITY_TOO_MANY_INCOMPATIBILITIES - = 5005; - - - SHIPMENT_TYPE_REQUIREMENT_ERROR = 52; - - - SHIPMENT_TYPE_REQUIREMENT_NO_REQUIRED_TYPE = 52001; - - SHIPMENT_TYPE_REQUIREMENT_NO_DEPENDENT_TYPE = 52002; - - SHIPMENT_TYPE_REQUIREMENT_INVALID_REQUIREMENT_MODE = - 52003; - - SHIPMENT_TYPE_REQUIREMENT_TOO_MANY_REQUIREMENTS = - 52004; - - SHIPMENT_TYPE_REQUIREMENT_EMPTY_REQUIRED_TYPE = 52005; - - SHIPMENT_TYPE_REQUIREMENT_DUPLICATE_REQUIRED_TYPE = - 52006; - - SHIPMENT_TYPE_REQUIREMENT_NO_REQUIRED_TYPE_FOUND = - 52007; - - SHIPMENT_TYPE_REQUIREMENT_EMPTY_DEPENDENT_TYPE = - 52008; - - SHIPMENT_TYPE_REQUIREMENT_DUPLICATE_DEPENDENT_TYPE = - 52009; - - SHIPMENT_TYPE_REQUIREMENT_SELF_DEPENDENT_TYPE = 52010; - - SHIPMENT_TYPE_REQUIREMENT_GRAPH_HAS_CYCLES = 52011; - - - VEHICLE_OPERATOR_ERROR = 54; - - - VEHICLE_OPERATOR_EMPTY_TYPE = 5400; - - VEHICLE_OPERATOR_MULTIPLE_START_TIME_WINDOWS = 5401; - - VEHICLE_OPERATOR_SOFT_START_TIME_WINDOW = 5402; - - VEHICLE_OPERATOR_MULTIPLE_END_TIME_WINDOWS = 5403; - - VEHICLE_OPERATOR_SOFT_END_TIME_WINDOW = 5404; - - - DURATION_SECONDS_MATRIX_ERROR = 56; - - - DURATION_SECONDS_MATRIX_DURATION_NEGATIVE_OR_NAN = - 5600; - - DURATION_SECONDS_MATRIX_DURATION_EXCEEDS_GLOBAL_DURATION - = 5601; - - - WARNING = 9; - - - WARNING_INJECTED_FIRST_SOLUTION = 90; - - - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_SHIPMENTS_REMOVED - = 9000; - - WARNING_INJECTED_FIRST_SOLUTION_INFEASIBLE_AFTER_GETTING_TRAVEL_TIMES - = 9001; + error hid the old one. For example, see "MULTIPLE ERRORS". display_name (str): The error display name. 
fields (MutableSequence[google.maps.routeoptimization_v1.types.OptimizeToursValidationError.FieldReference]): diff --git a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json index ff99ce099d17..c329d83ca2a2 100644 --- a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json +++ b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routeoptimization", - "version": "0.1.3" + "version": "0.1.0" }, "snippets": [ { From dbaefebce6ef5eac6e260fa995dfd765d8c9fc99 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:55:56 -0400 Subject: [PATCH 088/108] chore: release main (#13115) :robot: I have created a release *beep* *boop* ---
google-cloud-dataproc: 5.13.0 ## [5.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.12.0...google-cloud-dataproc-v5.13.0) (2024-09-30) ### Features * add support for Spark Connect sessions in Dataproc Serverless for Spark ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) ### Documentation * update docs for `filter` field in `ListSessionsRequest` ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a))
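As a rough illustration of the Spark Connect feature noted above, a minimal sketch follows. None of these identifiers appear in this diff: `SessionControllerClient` and `create_session` are the existing Dataproc Serverless Sessions surface, while `Session.spark_connect_session` and `SparkConnectConfig` are assumptions inferred from the 5.13.0 release note, and the project/region values are placeholders.

```python
# Hedged sketch, not from this diff: create a Dataproc Serverless Spark Connect
# session. spark_connect_session / SparkConnectConfig are assumed from the
# 5.13.0 release note; project and region below are placeholders.
from google.cloud import dataproc_v1

region = "us-central1"  # placeholder region
client = dataproc_v1.SessionControllerClient(
    client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
)

session = dataproc_v1.Session(
    spark_connect_session=dataproc_v1.SparkConnectConfig(),  # assumed field/message names
)

operation = client.create_session(
    parent=f"projects/my-project/locations/{region}",  # placeholder project
    session=session,
    session_id="my-spark-connect-session",
)
response = operation.result()  # create_session is a long-running operation
print(response.name, response.state)
```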
google-cloud-oracledatabase: 0.1.0 ## 0.1.0 (2024-09-30) ### Features * add initial files for google.cloud.oracledatabase.v1 ([#13100](https://github.com/googleapis/google-cloud-python/issues/13100)) ([c638f1f](https://github.com/googleapis/google-cloud-python/commit/c638f1f55a85a228ec6385095ca1befb54067188))
google-maps-routeoptimization: 0.1.4 ## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.3...google-maps-routeoptimization-v0.1.4) (2024-09-30) ### Features * A new field `route_token` is added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) * Add support for generating route tokens ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) ### Documentation * A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) * A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) * A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29))
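To make the new `route_token` support concrete, here is a minimal, hedged sketch of requesting and reading route tokens with 0.1.4. The field and message names (`populate_transition_polylines`, `ShipmentRoute.transitions`, `Transition.route_token`) come from this diff; the client construction, the placeholder project id, and the one-shipment model are assumptions.

```python
# Hedged sketch: request transition polylines (which, per 0.1.4, also populates
# route tokens) and read ShipmentRoute.Transition.route_token from the response.
# The project id and the tiny shipment model are placeholders.
from google.maps import routeoptimization_v1 as ro
from google.type import latlng_pb2

client = ro.RouteOptimizationClient()

model = ro.ShipmentModel(
    shipments=[
        ro.Shipment(
            deliveries=[
                ro.Shipment.VisitRequest(
                    arrival_location=latlng_pb2.LatLng(latitude=48.8566, longitude=2.3522)
                )
            ]
        )
    ],
    vehicles=[
        ro.Vehicle(start_location=latlng_pb2.LatLng(latitude=48.8600, longitude=2.3300))
    ],
)

response = client.optimize_tours(
    request=ro.OptimizeToursRequest(
        parent="projects/my-project",        # placeholder parent
        model=model,
        populate_transition_polylines=True,  # also populates route tokens in 0.1.4+
    )
)

for route in response.routes:
    for transition in route.transitions:
        # Opaque blob for the Navigation SDK; don't compare values across requests.
        print(transition.route_token)
```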
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 6 +++--- packages/google-cloud-dataproc/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/dataproc/gapic_version.py | 2 +- .../google/cloud/dataproc_v1/gapic_version.py | 2 +- ...snippet_metadata_google.cloud.dataproc.v1.json | 2 +- packages/google-cloud-oracledatabase/CHANGELOG.md | 11 ++++++++++- .../google/cloud/oracledatabase/gapic_version.py | 2 +- .../cloud/oracledatabase_v1/gapic_version.py | 2 +- .../google-maps-routeoptimization/CHANGELOG.md | 15 +++++++++++++++ .../maps/routeoptimization/gapic_version.py | 2 +- .../maps/routeoptimization_v1/gapic_version.py | 2 +- ...metadata_google.maps.routeoptimization.v1.json | 2 +- 12 files changed, 48 insertions(+), 12 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 37cf389b81dc..2fcaaf3233f5 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -68,7 +68,7 @@ "packages/google-cloud-dataform": "0.5.11", "packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.12.0", + "packages/google-cloud-dataproc": "5.13.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", @@ -117,7 +117,7 @@ "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", - "packages/google-cloud-oracledatabase": "0.0.0", + "packages/google-cloud-oracledatabase": "0.1.0", "packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", @@ -184,7 +184,7 @@ "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", "packages/google-maps-places": "0.1.18", - "packages/google-maps-routeoptimization": "0.1.3", + "packages/google-maps-routeoptimization": "0.1.4", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", "packages/google-shopping-css": "0.1.8", diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index 7a8b08f948bd..2601f62c33b4 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.12.0...google-cloud-dataproc-v5.13.0) (2024-09-30) + + +### Features + +* add support for Spark Connect sessions in Dataproc Serverless for Spark ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) + + +### Documentation + +* update docs for `filter` field in `ListSessionsRequest` ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) + ## [5.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.11.0...google-cloud-dataproc-v5.12.0) (2024-09-16) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 
558c8aab67c5..3f1bf5ee8722 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 558c8aab67c5..3f1bf5ee8722 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c5f4e003db04..f516b09c86b6 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "0.1.0" + "version": "5.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-oracledatabase/CHANGELOG.md b/packages/google-cloud-oracledatabase/CHANGELOG.md index 5ddad421e08f..6a8115a2ad70 100644 --- a/packages/google-cloud-oracledatabase/CHANGELOG.md +++ b/packages/google-cloud-oracledatabase/CHANGELOG.md @@ -1 +1,10 @@ -# Changelog \ No newline at end of file +# Changelog + +## 0.1.0 (2024-09-30) + + +### Features + +* add initial files for google.cloud.oracledatabase.v1 ([#13100](https://github.com/googleapis/google-cloud-python/issues/13100)) ([c638f1f](https://github.com/googleapis/google-cloud-python/commit/c638f1f55a85a228ec6385095ca1befb54067188)) + +## Changelog diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py index 558c8aab67c5..33d37a7b677b 100644 --- a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/CHANGELOG.md b/packages/google-maps-routeoptimization/CHANGELOG.md index 14bb0c6b2dc5..d53ed3a2c9c2 100644 --- a/packages/google-maps-routeoptimization/CHANGELOG.md +++ b/packages/google-maps-routeoptimization/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.3...google-maps-routeoptimization-v0.1.4) (2024-09-30) + + +### Features + +* A new field `route_token` is added to message `.google.maps.routeoptimization.v1.ShipmentRoute.Transition` ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) +* Add support for generating route tokens ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) + + +### Documentation + +* A comment for field `code` in message `.google.maps.routeoptimization.v1.OptimizeToursValidationError` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) +* A comment for field `populate_transition_polylines` in message `.google.maps.routeoptimization.v1.OptimizeToursRequest` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) +* A comment for method `BatchOptimizeTours` in service `RouteOptimization` is changed ([32b254c](https://github.com/googleapis/google-cloud-python/commit/32b254c110626aff2194aceb93f131f745cfcf29)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-routeoptimization-v0.1.2...google-maps-routeoptimization-v0.1.3) (2024-09-16) diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py +++ b/packages/google-maps-routeoptimization/google/maps/routeoptimization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json index c329d83ca2a2..b41aa32bfdd8 100644 --- a/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json +++ b/packages/google-maps-routeoptimization/samples/generated_samples/snippet_metadata_google.maps.routeoptimization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routeoptimization", - "version": "0.1.0" + "version": "0.1.4" }, "snippets": [ { From 6dde3826c7c13ff1fcc840495811f58648e0678e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 17:20:32 -0400 Subject: [PATCH 089/108] docs: [google-cloud-run]fixed formatting of some documentation (#13122) BEGIN_COMMIT_OVERRIDE docs:fixed formatting of some documentation feat:add Builds API feat:add Service Mesh configuration to Services feat:add GPU configuration to Services feat:add INGRESS_TRAFFIC_NONE to Services feat:add ServiceScaling to Services END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat:add Builds API feat:add Service Mesh configuration to Services feat:add GPU configuration to Services feat:add INGRESS_TRAFFIC_NONE to Services feat:add ServiceScaling to Services PiperOrigin-RevId: 681696446 Source-Link: https://github.com/googleapis/googleapis/commit/4fe2139be5d934a40005c41b9dc132a143457ad0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/cb5e78c3c478f81a8c813af5887757dc692052f3 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXJ1bi8uT3dsQm90LnlhbWwiLCJoIjoiY2I1ZTc4YzNjNDc4ZjgxYThjODEzYWY1ODg3NzU3ZGM2OTIwNTJmMyJ9 --------- Co-authored-by: Owl Bot --- .../google-cloud-run/docs/run_v2/builds.rst | 6 + .../docs/run_v2/services_.rst | 1 + .../google/cloud/run/__init__.py | 16 + .../google/cloud/run_v2/__init__.py | 11 + .../google/cloud/run_v2/gapic_metadata.json | 34 + .../cloud/run_v2/services/builds/__init__.py | 22 + .../run_v2/services/builds/async_client.py | 586 +++ .../cloud/run_v2/services/builds/client.py | 1021 ++++++ .../services/builds/transports/__init__.py | 36 + .../run_v2/services/builds/transports/base.py | 199 ++ .../run_v2/services/builds/transports/grpc.py | 343 ++ .../builds/transports/grpc_asyncio.py | 353 ++ .../run_v2/services/builds/transports/rest.py | 665 ++++ .../run_v2/services/jobs/async_client.py | 2 + .../cloud/run_v2/services/jobs/client.py | 15 + .../run_v2/services/revisions/async_client.py | 2 + .../cloud/run_v2/services/revisions/client.py | 22 + .../run_v2/services/services/async_client.py | 4 + .../cloud/run_v2/services/services/client.py | 37 + .../google/cloud/run_v2/types/__init__.py | 8 + .../google/cloud/run_v2/types/build.py | 251 ++ .../google/cloud/run_v2/types/revision.py | 14 + .../cloud/run_v2/types/revision_template.py | 21 +- .../google/cloud/run_v2/types/service.py | 38 +- .../cloud/run_v2/types/vendor_settings.py | 85 +- ..._v2_generated_builds_submit_build_async.py | 58 + ...n_v2_generated_builds_submit_build_sync.py | 58 + .../snippet_metadata_google.cloud.run.v2.json | 153 + .../scripts/fixup_run_v2_keywords.py | 1 + .../tests/unit/gapic/run_v2/test_builds.py | 3162 +++++++++++++++++ .../tests/unit/gapic/run_v2/test_jobs.py | 64 +- 
.../tests/unit/gapic/run_v2/test_revisions.py | 96 +- .../tests/unit/gapic/run_v2/test_services.py | 160 +- 33 files changed, 7417 insertions(+), 127 deletions(-) create mode 100644 packages/google-cloud-run/docs/run_v2/builds.rst create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py create mode 100644 packages/google-cloud-run/google/cloud/run_v2/types/build.py create mode 100644 packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py create mode 100644 packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py create mode 100644 packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py diff --git a/packages/google-cloud-run/docs/run_v2/builds.rst b/packages/google-cloud-run/docs/run_v2/builds.rst new file mode 100644 index 000000000000..fdedc682ab01 --- /dev/null +++ b/packages/google-cloud-run/docs/run_v2/builds.rst @@ -0,0 +1,6 @@ +Builds +------------------------ + +.. automodule:: google.cloud.run_v2.services.builds + :members: + :inherited-members: diff --git a/packages/google-cloud-run/docs/run_v2/services_.rst b/packages/google-cloud-run/docs/run_v2/services_.rst index c4c4ad864dfd..4a37414732ec 100644 --- a/packages/google-cloud-run/docs/run_v2/services_.rst +++ b/packages/google-cloud-run/docs/run_v2/services_.rst @@ -3,6 +3,7 @@ Services for Google Cloud Run v2 API .. 
toctree:: :maxdepth: 2 + builds executions jobs revisions diff --git a/packages/google-cloud-run/google/cloud/run/__init__.py b/packages/google-cloud-run/google/cloud/run/__init__.py index 740233a8afb0..47b51fb6ddca 100644 --- a/packages/google-cloud-run/google/cloud/run/__init__.py +++ b/packages/google-cloud-run/google/cloud/run/__init__.py @@ -18,6 +18,8 @@ __version__ = package_version.__version__ +from google.cloud.run_v2.services.builds.async_client import BuildsAsyncClient +from google.cloud.run_v2.services.builds.client import BuildsClient from google.cloud.run_v2.services.executions.async_client import ExecutionsAsyncClient from google.cloud.run_v2.services.executions.client import ExecutionsClient from google.cloud.run_v2.services.jobs.async_client import JobsAsyncClient @@ -28,6 +30,11 @@ from google.cloud.run_v2.services.services.client import ServicesClient from google.cloud.run_v2.services.tasks.async_client import TasksAsyncClient from google.cloud.run_v2.services.tasks.client import TasksClient +from google.cloud.run_v2.types.build import ( + StorageSource, + SubmitBuildRequest, + SubmitBuildResponse, +) from google.cloud.run_v2.types.condition import Condition from google.cloud.run_v2.types.execution import ( CancelExecutionRequest, @@ -106,12 +113,16 @@ EncryptionKeyRevocationAction, ExecutionEnvironment, IngressTraffic, + NodeSelector, RevisionScaling, + ServiceMesh, ServiceScaling, VpcAccess, ) __all__ = ( + "BuildsClient", + "BuildsAsyncClient", "ExecutionsClient", "ExecutionsAsyncClient", "JobsClient", @@ -122,6 +133,9 @@ "ServicesAsyncClient", "TasksClient", "TasksAsyncClient", + "StorageSource", + "SubmitBuildRequest", + "SubmitBuildResponse", "Condition", "CancelExecutionRequest", "DeleteExecutionRequest", @@ -182,7 +196,9 @@ "TrafficTargetStatus", "TrafficTargetAllocationType", "BinaryAuthorization", + "NodeSelector", "RevisionScaling", + "ServiceMesh", "ServiceScaling", "VpcAccess", "EncryptionKeyRevocationAction", diff --git a/packages/google-cloud-run/google/cloud/run_v2/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/__init__.py index 6c8895daadbd..ddc4d0dd73ee 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/__init__.py +++ b/packages/google-cloud-run/google/cloud/run_v2/__init__.py @@ -18,11 +18,13 @@ __version__ = package_version.__version__ +from .services.builds import BuildsAsyncClient, BuildsClient from .services.executions import ExecutionsAsyncClient, ExecutionsClient from .services.jobs import JobsAsyncClient, JobsClient from .services.revisions import RevisionsAsyncClient, RevisionsClient from .services.services import ServicesAsyncClient, ServicesClient from .services.tasks import TasksAsyncClient, TasksClient +from .types.build import StorageSource, SubmitBuildRequest, SubmitBuildResponse from .types.condition import Condition from .types.execution import ( CancelExecutionRequest, @@ -101,18 +103,22 @@ EncryptionKeyRevocationAction, ExecutionEnvironment, IngressTraffic, + NodeSelector, RevisionScaling, + ServiceMesh, ServiceScaling, VpcAccess, ) __all__ = ( + "BuildsAsyncClient", "ExecutionsAsyncClient", "JobsAsyncClient", "RevisionsAsyncClient", "ServicesAsyncClient", "TasksAsyncClient", "BinaryAuthorization", + "BuildsClient", "CancelExecutionRequest", "CloudSqlInstance", "Condition", @@ -156,6 +162,7 @@ "ListTasksRequest", "ListTasksResponse", "NFSVolumeSource", + "NodeSelector", "Probe", "ResourceRequirements", "Revision", @@ -167,8 +174,12 @@ "SecretKeySelector", "SecretVolumeSource", "Service", + "ServiceMesh", 
"ServiceScaling", "ServicesClient", + "StorageSource", + "SubmitBuildRequest", + "SubmitBuildResponse", "TCPSocketAction", "Task", "TaskAttemptResult", diff --git a/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json b/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json index e4131ae5c8f6..3f193aa3f5a1 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json +++ b/packages/google-cloud-run/google/cloud/run_v2/gapic_metadata.json @@ -5,6 +5,40 @@ "protoPackage": "google.cloud.run.v2", "schema": "1.0", "services": { + "Builds": { + "clients": { + "grpc": { + "libraryClient": "BuildsClient", + "rpcs": { + "SubmitBuild": { + "methods": [ + "submit_build" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BuildsAsyncClient", + "rpcs": { + "SubmitBuild": { + "methods": [ + "submit_build" + ] + } + } + }, + "rest": { + "libraryClient": "BuildsClient", + "rpcs": { + "SubmitBuild": { + "methods": [ + "submit_build" + ] + } + } + } + } + }, "Executions": { "clients": { "grpc": { diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py new file mode 100644 index 000000000000..c8c671c7635b --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import BuildsAsyncClient +from .client import BuildsClient + +__all__ = ( + "BuildsClient", + "BuildsAsyncClient", +) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py new file mode 100644 index 000000000000..db45db73e87f --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/async_client.py @@ -0,0 +1,586 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.run_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.run_v2.types import build + +from .client import BuildsClient +from .transports.base import DEFAULT_CLIENT_INFO, BuildsTransport +from .transports.grpc_asyncio import BuildsGrpcAsyncIOTransport + + +class BuildsAsyncClient: + """Cloud Run Build Control Plane API""" + + _client: BuildsClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = BuildsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BuildsClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BuildsClient._DEFAULT_UNIVERSE + + worker_pool_path = staticmethod(BuildsClient.worker_pool_path) + parse_worker_pool_path = staticmethod(BuildsClient.parse_worker_pool_path) + common_billing_account_path = staticmethod(BuildsClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + BuildsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(BuildsClient.common_folder_path) + parse_common_folder_path = staticmethod(BuildsClient.parse_common_folder_path) + common_organization_path = staticmethod(BuildsClient.common_organization_path) + parse_common_organization_path = staticmethod( + BuildsClient.parse_common_organization_path + ) + common_project_path = staticmethod(BuildsClient.common_project_path) + parse_common_project_path = staticmethod(BuildsClient.parse_common_project_path) + common_location_path = staticmethod(BuildsClient.common_location_path) + parse_common_location_path = staticmethod(BuildsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsAsyncClient: The constructed client. + """ + return BuildsClient.from_service_account_info.__func__(BuildsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsAsyncClient: The constructed client. 
+ """ + return BuildsClient.from_service_account_file.__func__(BuildsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BuildsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BuildsTransport: + """Returns the transport used by the client instance. + + Returns: + BuildsTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = BuildsClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, BuildsTransport, Callable[..., BuildsTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the builds async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BuildsTransport,Callable[..., BuildsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BuildsTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = BuildsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def submit_build( + self, + request: Optional[Union[build.SubmitBuildRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> build.SubmitBuildResponse: + r"""Submits a build in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import run_v2 + + async def sample_submit_build(): + # Create a client + client = run_v2.BuildsAsyncClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = await client.submit_build(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.run_v2.types.SubmitBuildRequest, dict]]): + The request object. Request message for submitting a + Build. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.run_v2.types.SubmitBuildResponse: + Response message for submitting a + Build. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, build.SubmitBuildRequest): + request = build.SubmitBuildRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.submit_build + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. 
+ + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BuildsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BuildsAsyncClient",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py new file mode 100644 index 000000000000..e7fdd36add08 --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/client.py @@ -0,0 +1,1021 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.run_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.run_v2.types import build + +from .transports.base import DEFAULT_CLIENT_INFO, BuildsTransport +from .transports.grpc import BuildsGrpcTransport +from .transports.grpc_asyncio import BuildsGrpcAsyncIOTransport +from .transports.rest import BuildsRestTransport + + +class BuildsClientMeta(type): + """Metaclass for the Builds client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[BuildsTransport]] + _transport_registry["grpc"] = BuildsGrpcTransport + _transport_registry["grpc_asyncio"] = BuildsGrpcAsyncIOTransport + _transport_registry["rest"] = BuildsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BuildsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BuildsClient(metaclass=BuildsClientMeta): + """Cloud Run Build Control Plane API""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = "run.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "run.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BuildsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BuildsTransport: + """Returns the transport used by the client instance. + + Returns: + BuildsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def worker_pool_path( + project: str, + location: str, + worker_pool: str, + ) -> str: + """Returns a fully-qualified worker_pool string.""" + return ( + "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( + project=project, + location=location, + worker_pool=worker_pool, + ) + ) + + @staticmethod + def parse_worker_pool_path(path: str) -> Dict[str, str]: + """Parses a worker_pool path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workerPools/(?P<worker_pool>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} +
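The resource-path helpers above are plain string formatting and regex parsing, so they can be sanity-checked without credentials or network access. A minimal sketch, importing straight from the module added in this diff (a packaged install would normally re-export the client as google.cloud.run_v2.BuildsClient); the project, location, and worker pool values are placeholders:

    from google.cloud.run_v2.services.builds.client import BuildsClient

    # Compose a fully-qualified worker pool name from its parts (placeholder values).
    path = BuildsClient.worker_pool_path("example-project", "us-central1", "example-pool")
    # -> "projects/example-project/locations/us-central1/workerPools/example-pool"

    # Parse it back into named segments; a string that does not match yields an empty dict.
    assert BuildsClient.parse_worker_pool_path(path) == {
        "project": "example-project",
        "location": "us-central1",
        "worker_pool": "example-pool",
    }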
+ @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = BuildsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = BuildsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = BuildsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = BuildsClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or BuildsClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, BuildsTransport, Callable[..., BuildsTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the builds client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BuildsTransport,Callable[..., BuildsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BuildsTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = BuildsClient._read_environment_variables() + self._client_cert_source = BuildsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = BuildsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BuildsTransport) + if transport_provided: + # transport is a BuildsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(BuildsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or BuildsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[BuildsTransport], Callable[..., BuildsTransport] + ] = ( + BuildsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BuildsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def submit_build( + self, + request: Optional[Union[build.SubmitBuildRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> build.SubmitBuildResponse: + r"""Submits a build in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import run_v2 + + def sample_submit_build(): + # Create a client + client = run_v2.BuildsClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = client.submit_build(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.run_v2.types.SubmitBuildRequest, dict]): + The request object. Request message for submitting a + Build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.run_v2.types.SubmitBuildResponse: + Response message for submitting a + Build. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, build.SubmitBuildRequest): + request = build.SubmitBuildRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.submit_build] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BuildsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BuildsClient",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py new file mode 100644 index 000000000000..bf368793a29f --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BuildsTransport +from .grpc import BuildsGrpcTransport +from .grpc_asyncio import BuildsGrpcAsyncIOTransport +from .rest import BuildsRestInterceptor, BuildsRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[BuildsTransport]] +_transport_registry["grpc"] = BuildsGrpcTransport +_transport_registry["grpc_asyncio"] = BuildsGrpcAsyncIOTransport +_transport_registry["rest"] = BuildsRestTransport + +__all__ = ( + "BuildsTransport", + "BuildsGrpcTransport", + "BuildsGrpcAsyncIOTransport", + "BuildsRestTransport", + "BuildsRestInterceptor", +) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py new file mode 100644 index 000000000000..c30ed50a208d --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/base.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.run_v2 import gapic_version as package_version +from google.cloud.run_v2.types import build + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class BuildsTransport(abc.ABC): + """Abstract transport class for Builds.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "run.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.submit_build: gapic_v1.method.wrap_method( + self.submit_build, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def submit_build( + self, + ) -> Callable[ + [build.SubmitBuildRequest], + Union[build.SubmitBuildResponse, Awaitable[build.SubmitBuildResponse]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BuildsTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py new file mode 100644 index 000000000000..3097fbc74d50 --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.run_v2.types import build + +from .base import DEFAULT_CLIENT_INFO, BuildsTransport + + +class BuildsGrpcTransport(BuildsTransport): + """gRPC backend transport for Builds. + + Cloud Run Build Control Plane API + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def submit_build( + self, + ) -> Callable[[build.SubmitBuildRequest], build.SubmitBuildResponse]: + r"""Return a callable for the submit build method over gRPC. + + Submits a build in a given project. + + Returns: + Callable[[~.SubmitBuildRequest], + ~.SubmitBuildResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_build" not in self._stubs: + self._stubs["submit_build"] = self.grpc_channel.unary_unary( + "/google.cloud.run.v2.Builds/SubmitBuild", + request_serializer=build.SubmitBuildRequest.serialize, + response_deserializer=build.SubmitBuildResponse.deserialize, + ) + return self._stubs["submit_build"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BuildsGrpcTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d4277b75fc04 --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/grpc_asyncio.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.run_v2.types import build + +from .base import DEFAULT_CLIENT_INFO, BuildsTransport +from .grpc import BuildsGrpcTransport + + +class BuildsGrpcAsyncIOTransport(BuildsTransport): + """gRPC AsyncIO backend transport for Builds. + + Cloud Run Build Control Plane API + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
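The mutual-TLS branch below accepts a callback that returns the client certificate and private key in PEM form. A hedged sketch of supplying such a callback, assuming Application Default Credentials; the PEM file paths are illustrative, and most deployments would rely on the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable and the client's own mTLS detection rather than building the transport by hand:

from google.cloud import run_v2
from google.cloud.run_v2.services.builds.transports.grpc_asyncio import (
    BuildsGrpcAsyncIOTransport,
)


def client_cert_source():
    # Illustrative only: load a client certificate/key pair in PEM format.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()


transport = BuildsGrpcAsyncIOTransport(
    host="run.googleapis.com",
    client_cert_source_for_mtls=client_cert_source,
)
client = run_v2.BuildsAsyncClient(transport=transport)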
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def submit_build( + self, + ) -> Callable[[build.SubmitBuildRequest], Awaitable[build.SubmitBuildResponse]]: + r"""Return a callable for the submit build method over gRPC. + + Submits a build in a given project. + + Returns: + Callable[[~.SubmitBuildRequest], + Awaitable[~.SubmitBuildResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_build" not in self._stubs: + self._stubs["submit_build"] = self.grpc_channel.unary_unary( + "/google.cloud.run.v2.Builds/SubmitBuild", + request_serializer=build.SubmitBuildRequest.serialize, + response_deserializer=build.SubmitBuildResponse.deserialize, + ) + return self._stubs["submit_build"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.submit_build: gapic_v1.method_async.wrap_method( + self.submit_build, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("BuildsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py new file mode 100644 index 000000000000..7e8ec2f6ff01 --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/services/builds/transports/rest.py @@ -0,0 +1,665 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
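The delete/wait/get/list operation callables above are the standard long-running-operations mixins; as with the other run_v2 clients, they are expected to surface on the generated Builds clients under the same method names. A small sketch of checking an operation's status through the async client, where the operation name is a placeholder:

from google.cloud import run_v2
from google.longrunning import operations_pb2


async def operation_done(name: str) -> bool:
    # name is expected to look like
    # "projects/<project>/locations/<location>/operations/<operation-id>".
    client = run_v2.BuildsAsyncClient()
    operation = await client.get_operation(
        request=operations_pb2.GetOperationRequest(name=name)
    )
    return operation.done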
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.run_v2.types import build + +from .base import BuildsTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BuildsRestInterceptor: + """Interceptor for Builds. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BuildsRestTransport. + + .. code-block:: python + class MyCustomBuildsInterceptor(BuildsRestInterceptor): + def pre_submit_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_submit_build(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BuildsRestTransport(interceptor=MyCustomBuildsInterceptor()) + client = BuildsClient(transport=transport) + + + """ + + def pre_submit_build( + self, request: build.SubmitBuildRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[build.SubmitBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for submit_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_submit_build( + self, response: build.SubmitBuildResponse + ) -> build.SubmitBuildResponse: + """Post-rpc interceptor for submit_build + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. 
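The interceptor example in the class docstring above is schematic; a runnable variant of the same idea, assuming Application Default Credentials, might look like the following, with the logging destination and logged fields chosen purely for illustration:

import logging

from google.cloud import run_v2
from google.cloud.run_v2.services.builds.transports.rest import (
    BuildsRestInterceptor,
    BuildsRestTransport,
)


class LoggingBuildsInterceptor(BuildsRestInterceptor):
    """Logs each SubmitBuild round trip using the pre/post hooks defined above."""

    def pre_submit_build(self, request, metadata):
        logging.info("SubmitBuild for parent %s", request.parent)
        return request, metadata

    def post_submit_build(self, response):
        logging.info("SubmitBuild returned %s", response.build_operation.name)
        return response


transport = BuildsRestTransport(interceptor=LoggingBuildsInterceptor())
client = run_v2.BuildsClient(transport=transport)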
+ """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + def pre_wait_operation( + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Builds server. + """ + return request, metadata + + def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the Builds server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BuildsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BuildsRestInterceptor + + +class BuildsRestTransport(BuildsTransport): + """REST backend transport for Builds. + + Cloud Run Build Control Plane API + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "run.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BuildsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'run.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BuildsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _SubmitBuild(BuildsRestStub): + def __hash__(self): + return hash("SubmitBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: build.SubmitBuildRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> build.SubmitBuildResponse: + r"""Call the submit build method over HTTP. + + Args: + request (~.build.SubmitBuildRequest): + The request object. Request message for submitting a + Build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.build.SubmitBuildResponse: + Response message for submitting a + Build. 
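Most callers never construct BuildsRestTransport directly; selecting the REST transport by name on the client is usually enough. A minimal sketch, where the explicit ``api_endpoint`` is an illustrative assumption and can normally be omitted:

from google.cloud import run_v2

client = run_v2.BuildsClient(
    transport="rest",
    client_options={"api_endpoint": "run.googleapis.com"},
)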
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/builds:submit", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_submit_build(request, metadata) + pb_request = build.SubmitBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = build.SubmitBuildResponse() + pb_resp = build.SubmitBuildResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_submit_build(resp) + return resp + + @property + def submit_build( + self, + ) -> Callable[[build.SubmitBuildRequest], build.SubmitBuildResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SubmitBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(BuildsRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
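The ``http_options`` table above maps SubmitBuild to ``POST /v2/{parent}/builds:submit`` with the request serialized as JSON in the body. A hand-rolled call roughly equivalent to what the stub does could look like this sketch; the project, bucket, object and image names are placeholders, and Application Default Credentials are assumed:

import google.auth
from google.auth.transport.requests import AuthorizedSession

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
session = AuthorizedSession(credentials)

parent = "projects/my-project/locations/us-central1"
body = {
    # Proto JSON uses lowerCamelCase field names.
    "imageUri": "us-central1-docker.pkg.dev/my-project/my-repo/my-app",
    "storageSource": {"bucket": "my-sources", "object": "app-source.tar.gz"},
}
response = session.post(
    f"https://run.googleapis.com/v2/{parent}/builds:submit", json=body
)
response.raise_for_status()
print(response.json())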
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(BuildsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
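Errors from these REST stubs are raised via ``core_exceptions.from_http_response``, so callers see the usual ``GoogleAPICallError`` subclasses rather than raw HTTP responses. A short sketch of catching a missing operation, with a placeholder resource name:

from google.api_core import exceptions as core_exceptions
from google.cloud import run_v2
from google.longrunning import operations_pb2

client = run_v2.BuildsClient(transport="rest")
try:
    client.get_operation(
        request=operations_pb2.GetOperationRequest(
            name="projects/my-project/locations/us-central1/operations/op-123"
        )
    )
except core_exceptions.NotFound as exc:
    # A 404 from the server is surfaced as NotFound, a GoogleAPICallError
    # subclass produced by from_http_response().
    print(exc.message)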
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(BuildsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(BuildsRestStub): + def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the wait operation method over HTTP. + + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. 
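ListOperations here returns the raw ``ListOperationsResponse`` rather than a pager, so callers page manually with ``next_page_token``. A sketch, with placeholder project and location values:

from google.cloud import run_v2
from google.longrunning import operations_pb2

client = run_v2.BuildsClient(transport="rest")
request = operations_pb2.ListOperationsRequest(
    name="projects/my-project/locations/us-central1"
)
while True:
    response = client.list_operations(request=request)
    for operation in response.operations:
        print(operation.name, operation.done)
    if not response.next_page_token:
        break
    request.page_token = response.next_page_token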
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_wait_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_wait_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BuildsRestTransport",) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py index 931b728a5107..b41ee497e867 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/async_client.py @@ -82,6 +82,8 @@ class JobsAsyncClient: parse_execution_path = staticmethod(JobsClient.parse_execution_path) job_path = staticmethod(JobsClient.job_path) parse_job_path = staticmethod(JobsClient.parse_job_path) + policy_path = staticmethod(JobsClient.policy_path) + parse_policy_path = staticmethod(JobsClient.parse_policy_path) secret_path = staticmethod(JobsClient.secret_path) parse_secret_path = staticmethod(JobsClient.parse_secret_path) secret_version_path = staticmethod(JobsClient.secret_version_path) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py index 90ccfba0b0a8..19f51f6caf36 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/jobs/client.py @@ -285,6 +285,21 @@ def parse_job_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def policy_path( + project: str, + ) -> str: + """Returns a fully-qualified policy string.""" + return "projects/{project}/policy".format( + project=project, + ) + + @staticmethod + def parse_policy_path(path: str) -> Dict[str, str]: + """Parses a policy path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/policy$", path) + return m.groupdict() if m else {} + @staticmethod def secret_path( project: str, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py index 4862a5b8fd37..37ff9b05024c 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py +++ 
b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/async_client.py @@ -80,6 +80,8 @@ class RevisionsAsyncClient: parse_connector_path = staticmethod(RevisionsClient.parse_connector_path) crypto_key_path = staticmethod(RevisionsClient.crypto_key_path) parse_crypto_key_path = staticmethod(RevisionsClient.parse_crypto_key_path) + mesh_path = staticmethod(RevisionsClient.mesh_path) + parse_mesh_path = staticmethod(RevisionsClient.parse_mesh_path) revision_path = staticmethod(RevisionsClient.revision_path) parse_revision_path = staticmethod(RevisionsClient.parse_revision_path) secret_path = staticmethod(RevisionsClient.secret_path) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py index fe0b0250af38..05953885ca89 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/revisions/client.py @@ -241,6 +241,28 @@ def parse_crypto_key_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def mesh_path( + project: str, + location: str, + mesh: str, + ) -> str: + """Returns a fully-qualified mesh string.""" + return "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + ) + + @staticmethod + def parse_mesh_path(path: str) -> Dict[str, str]: + """Parses a mesh path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/meshes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def revision_path( project: str, diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py b/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py index 57ec39c67773..28d259c68b51 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/services/async_client.py @@ -79,6 +79,10 @@ class ServicesAsyncClient: parse_connector_path = staticmethod(ServicesClient.parse_connector_path) crypto_key_path = staticmethod(ServicesClient.crypto_key_path) parse_crypto_key_path = staticmethod(ServicesClient.parse_crypto_key_path) + mesh_path = staticmethod(ServicesClient.mesh_path) + parse_mesh_path = staticmethod(ServicesClient.parse_mesh_path) + policy_path = staticmethod(ServicesClient.policy_path) + parse_policy_path = staticmethod(ServicesClient.parse_policy_path) revision_path = staticmethod(ServicesClient.revision_path) parse_revision_path = staticmethod(ServicesClient.parse_revision_path) secret_path = staticmethod(ServicesClient.secret_path) diff --git a/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py b/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py index b9909a3d3f08..65ad349bd447 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py +++ b/packages/google-cloud-run/google/cloud/run_v2/services/services/client.py @@ -240,6 +240,43 @@ def parse_crypto_key_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def mesh_path( + project: str, + location: str, + mesh: str, + ) -> str: + """Returns a fully-qualified mesh string.""" + return "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + ) + + @staticmethod + def 
parse_mesh_path(path: str) -> Dict[str, str]: + """Parses a mesh path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/meshes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def policy_path( + project: str, + ) -> str: + """Returns a fully-qualified policy string.""" + return "projects/{project}/policy".format( + project=project, + ) + + @staticmethod + def parse_policy_path(path: str) -> Dict[str, str]: + """Parses a policy path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/policy$", path) + return m.groupdict() if m else {} + @staticmethod def revision_path( project: str, diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py b/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py index 5ac1e2b5c025..932186b26e22 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/__init__.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .build import StorageSource, SubmitBuildRequest, SubmitBuildResponse from .condition import Condition from .execution import ( CancelExecutionRequest, @@ -91,12 +92,17 @@ EncryptionKeyRevocationAction, ExecutionEnvironment, IngressTraffic, + NodeSelector, RevisionScaling, + ServiceMesh, ServiceScaling, VpcAccess, ) __all__ = ( + "StorageSource", + "SubmitBuildRequest", + "SubmitBuildResponse", "Condition", "CancelExecutionRequest", "DeleteExecutionRequest", @@ -157,7 +163,9 @@ "TrafficTargetStatus", "TrafficTargetAllocationType", "BinaryAuthorization", + "NodeSelector", "RevisionScaling", + "ServiceMesh", "ServiceScaling", "VpcAccess", "EncryptionKeyRevocationAction", diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/build.py b/packages/google-cloud-run/google/cloud/run_v2/types/build.py new file mode 100644 index 000000000000..c66b6d2c5ebf --- /dev/null +++ b/packages/google-cloud-run/google/cloud/run_v2/types/build.py @@ -0,0 +1,251 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.longrunning import operations_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.run.v2", + manifest={ + "SubmitBuildRequest", + "SubmitBuildResponse", + "StorageSource", + }, +) + + +class SubmitBuildRequest(proto.Message): + r"""Request message for submitting a Build. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The project and location to build in. 
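The new ``mesh_path``/``parse_mesh_path`` and ``policy_path``/``parse_policy_path`` helpers added above follow the usual resource-name conventions. A small sketch of how they round-trip, with placeholder project and mesh names:

from google.cloud import run_v2

mesh_name = run_v2.ServicesClient.mesh_path("my-project", "global", "my-mesh")
# -> "projects/my-project/locations/global/meshes/my-mesh"
assert run_v2.ServicesClient.parse_mesh_path(mesh_name) == {
    "project": "my-project",
    "location": "global",
    "mesh": "my-mesh",
}

policy_name = run_v2.JobsClient.policy_path("my-project")
# -> "projects/my-project/policy"
assert run_v2.JobsClient.parse_policy_path(policy_name) == {"project": "my-project"}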
Location + must be a region, e.g., 'us-central1' or 'global' if the + global builder is to be used. Format: + ``projects/{project}/locations/{location}`` + storage_source (google.cloud.run_v2.types.StorageSource): + Required. Source for the build. + + This field is a member of `oneof`_ ``source``. + image_uri (str): + Required. Artifact Registry URI to store the + built image. + buildpack_build (google.cloud.run_v2.types.SubmitBuildRequest.BuildpacksBuild): + Build the source using Buildpacks. + + This field is a member of `oneof`_ ``build_type``. + docker_build (google.cloud.run_v2.types.SubmitBuildRequest.DockerBuild): + Build the source using Docker. This means the + source has a Dockerfile. + + This field is a member of `oneof`_ ``build_type``. + service_account (str): + Optional. The service account to use for the + build. If not set, the default Cloud Build + service account for the project will be used. + worker_pool (str): + Optional. Name of the Cloud Build Custom Worker Pool that + should be used to build the function. The format of this + field is + ``projects/{project}/locations/{region}/workerPools/{workerPool}`` + where ``{project}`` and ``{region}`` are the project id and + region respectively where the worker pool is defined and + ``{workerPool}`` is the short name of the worker pool. + tags (MutableSequence[str]): + Optional. Additional tags to annotate the + build. + """ + + class DockerBuild(proto.Message): + r"""Build the source using Docker. This means the source has a + Dockerfile. + + """ + + class BuildpacksBuild(proto.Message): + r"""Build the source using Buildpacks. + + Attributes: + runtime (str): + The runtime name, e.g. 'go113'. Leave blank + for generic builds. + function_target (str): + Optional. Name of the function target if the + source is a function source. Required for + function builds. + cache_image_uri (str): + Optional. cache_image_uri is the GCR/AR URL where the cache + image will be stored. cache_image_uri is optional and + omitting it will disable caching. This URL must be stable + across builds. It is used to derive a build-specific + temporary URL by substituting the tag with the build ID. The + build will clean up the temporary image on a best-effort + basis. + base_image (str): + Optional. The base image used to opt into + automatic base image updates. + environment_variables (MutableMapping[str, str]): + Optional. User-provided build-time + environment variables. + enable_automatic_updates (bool): + Optional. Whether or not the application + container will be enrolled in automatic base + image updates. When true, the application will + be built on a scratch base image, so the base + layers can be appended at run time. 
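Given the fields described above, a buildpacks build request might be assembled as in the sketch below; the project, bucket, object, image and runtime values, as well as the entrypoint variable, are placeholders rather than required settings.

from google.cloud import run_v2

request = run_v2.SubmitBuildRequest(
    parent="projects/my-project/locations/us-central1",
    storage_source=run_v2.StorageSource(
        bucket="my-sources",
        object_="app-source.tar.gz",
    ),
    image_uri="us-central1-docker.pkg.dev/my-project/my-repo/my-app",
    buildpack_build=run_v2.SubmitBuildRequest.BuildpacksBuild(
        runtime="python312",
        environment_variables={"GOOGLE_ENTRYPOINT": "gunicorn app:app"},
        enable_automatic_updates=True,
    ),
)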
+ """ + + runtime: str = proto.Field( + proto.STRING, + number=1, + ) + function_target: str = proto.Field( + proto.STRING, + number=2, + ) + cache_image_uri: str = proto.Field( + proto.STRING, + number=3, + ) + base_image: str = proto.Field( + proto.STRING, + number=4, + ) + environment_variables: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + enable_automatic_updates: bool = proto.Field( + proto.BOOL, + number=6, + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + storage_source: "StorageSource" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="StorageSource", + ) + image_uri: str = proto.Field( + proto.STRING, + number=3, + ) + buildpack_build: BuildpacksBuild = proto.Field( + proto.MESSAGE, + number=4, + oneof="build_type", + message=BuildpacksBuild, + ) + docker_build: DockerBuild = proto.Field( + proto.MESSAGE, + number=5, + oneof="build_type", + message=DockerBuild, + ) + service_account: str = proto.Field( + proto.STRING, + number=6, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=7, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +class SubmitBuildResponse(proto.Message): + r"""Response message for submitting a Build. + + Attributes: + build_operation (google.longrunning.operations_pb2.Operation): + Cloud Build operation to be polled via + CloudBuild API. + base_image_uri (str): + URI of the base builder image in Artifact + Registry being used in the build. Used to opt + into automatic base image updates. + base_image_warning (str): + Warning message for the base image. + """ + + build_operation: operations_pb2.Operation = proto.Field( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + base_image_uri: str = proto.Field( + proto.STRING, + number=2, + ) + base_image_warning: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StorageSource(proto.Message): + r"""Location of the source in an archive file in Google Cloud + Storage. + + Attributes: + bucket (str): + Required. Google Cloud Storage bucket containing the source + (see `Bucket Name + Requirements `__). + object_ (str): + Required. Google Cloud Storage object containing the source. + + This object must be a gzipped archive file (``.tar.gz``) + containing source to build. + generation (int): + Optional. Google Cloud Storage generation for + the object. If the generation is omitted, the + latest generation will be used. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_: str = proto.Field( + proto.STRING, + number=2, + ) + generation: int = proto.Field( + proto.INT64, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/revision.py b/packages/google-cloud-run/google/cloud/run_v2/types/revision.py index 1f25de97aed5..4119ad79a757 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/revision.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/revision.py @@ -248,6 +248,8 @@ class Revision(proto.Message): key (CMEK) to use to encrypt this container image. For more information, go to https://cloud.google.com/run/docs/securing/using-cmek + service_mesh (google.cloud.run_v2.types.ServiceMesh): + Enables service mesh connectivity. encryption_key_revocation_action (google.cloud.run_v2.types.EncryptionKeyRevocationAction): The action to take if the encryption key is revoked. 
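A sketch of submitting a Dockerfile-based build with the ``StorageSource`` and ``SubmitBuildResponse`` types defined above; the returned ``build_operation`` belongs to the Cloud Build API and is polled there, so only its name is read here, and all resource names are placeholders.

from google.cloud import run_v2

client = run_v2.BuildsClient()
response = client.submit_build(
    request=run_v2.SubmitBuildRequest(
        parent="projects/my-project/locations/us-central1",
        storage_source=run_v2.StorageSource(
            bucket="my-sources", object_="app-source.tar.gz"
        ),
        image_uri="us-central1-docker.pkg.dev/my-project/my-repo/my-app",
        docker_build=run_v2.SubmitBuildRequest.DockerBuild(),
    )
)
print("Cloud Build operation:", response.build_operation.name)
if response.base_image_warning:
    print("Base image warning:", response.base_image_warning)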
@@ -280,6 +282,8 @@ class Revision(proto.Message): scaling_status (google.cloud.run_v2.types.RevisionScalingStatus): Output only. The current effective scaling settings for the revision. + node_selector (google.cloud.run_v2.types.NodeSelector): + The node selector for the revision. etag (str): Output only. A system-generated fingerprint for this version of the resource. May be used to @@ -379,6 +383,11 @@ class Revision(proto.Message): proto.STRING, number=21, ) + service_mesh: vendor_settings.ServiceMesh = proto.Field( + proto.MESSAGE, + number=22, + message=vendor_settings.ServiceMesh, + ) encryption_key_revocation_action: vendor_settings.EncryptionKeyRevocationAction = ( proto.Field( proto.ENUM, @@ -421,6 +430,11 @@ class Revision(proto.Message): number=39, message=status.RevisionScalingStatus, ) + node_selector: vendor_settings.NodeSelector = proto.Field( + proto.MESSAGE, + number=40, + message=vendor_settings.NodeSelector, + ) etag: str = proto.Field( proto.STRING, number=99, diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py b/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py index ca94333ae35f..c2eda044f0df 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/revision_template.py @@ -103,13 +103,20 @@ class RevisionTemplate(proto.Message): image. For more information, go to https://cloud.google.com/run/docs/securing/using-cmek max_instance_request_concurrency (int): - Optional. Sets the maximum number of requests - that each serving instance can receive. + Optional. Sets the maximum number of requests that each + serving instance can receive. If not specified or 0, + defaults to 80 when requested ``CPU >= 1`` and defaults to 1 + when requested ``CPU < 1``. + service_mesh (google.cloud.run_v2.types.ServiceMesh): + Optional. Enables service mesh connectivity. session_affinity (bool): Optional. Enable session affinity. health_check_disabled (bool): Optional. Disables health checking containers during deployment. + node_selector (google.cloud.run_v2.types.NodeSelector): + Optional. The node selector for the revision + template. """ revision: str = proto.Field( @@ -168,6 +175,11 @@ class RevisionTemplate(proto.Message): proto.INT32, number=15, ) + service_mesh: vendor_settings.ServiceMesh = proto.Field( + proto.MESSAGE, + number=16, + message=vendor_settings.ServiceMesh, + ) session_affinity: bool = proto.Field( proto.BOOL, number=19, @@ -176,6 +188,11 @@ class RevisionTemplate(proto.Message): proto.BOOL, number=20, ) + node_selector: vendor_settings.NodeSelector = proto.Field( + proto.MESSAGE, + number=21, + message=vendor_settings.NodeSelector, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/service.py b/packages/google-cloud-run/google/cloud/run_v2/types/service.py index 717c05138d3e..7b98547e4f7e 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/service.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/service.py @@ -358,9 +358,24 @@ class Service(proto.Message): scaling (google.cloud.run_v2.types.ServiceScaling): Optional. Specifies service-level scaling settings + invoker_iam_disabled (bool): + Optional. Disables IAM permission check for + run.routes.invoke for callers of this service. + This setting should not be used with external + ingress. default_uri_disabled (bool): Optional. Disables public resolution of the default URI of this service. 
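The new ``service_mesh`` and ``node_selector`` fields on ``RevisionTemplate`` can be set when declaring a Service, alongside service-level scaling. The sketch below assumes these types are re-exported at the ``run_v2`` top level as usual; the mesh name, accelerator type and container image are placeholders.

from google.cloud import run_v2

service = run_v2.Service(
    template=run_v2.RevisionTemplate(
        service_mesh=run_v2.ServiceMesh(
            mesh="projects/my-project/locations/global/meshes/my-mesh"
        ),
        node_selector=run_v2.NodeSelector(accelerator="nvidia-l4"),
        containers=[
            run_v2.Container(image="us-docker.pkg.dev/cloudrun/container/hello")
        ],
    ),
    scaling=run_v2.ServiceScaling(min_instance_count=1),
)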
+ urls (MutableSequence[str]): + Output only. All URLs serving traffic for + this Service. + custom_audiences (MutableSequence[str]): + One or more custom audiences that you want + this service to support. Specify each custom + audience as the full URL in a string. The custom + audiences are encoded in the token and used to + authenticate requests. For more information, see + https://cloud.google.com/run/docs/configuring/custom-audiences. observed_generation (int): Output only. The generation of this Service currently serving traffic. See comments in ``reconciling`` for @@ -396,13 +411,6 @@ class Service(proto.Message): uri (str): Output only. The main URI in which this Service is serving traffic. - custom_audiences (MutableSequence[str]): - One or more custom audiences that you want - this service to support. Specify each custom - audience as the full URL in a string. The custom - audiences are encoded in the token and used to - authenticate requests. For more information, see - https://cloud.google.com/run/docs/configuring/custom-audiences. satisfies_pzs (bool): Output only. Reserved for future use. reconciling (bool): @@ -531,10 +539,22 @@ class Service(proto.Message): number=20, message=vendor_settings.ServiceScaling, ) + invoker_iam_disabled: bool = proto.Field( + proto.BOOL, + number=21, + ) default_uri_disabled: bool = proto.Field( proto.BOOL, number=22, ) + urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=24, + ) + custom_audiences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=37, + ) observed_generation: int = proto.Field( proto.INT64, number=30, @@ -568,10 +588,6 @@ class Service(proto.Message): proto.STRING, number=36, ) - custom_audiences: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=37, - ) satisfies_pzs: bool = proto.Field( proto.BOOL, number=38, diff --git a/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py b/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py index 818261827ec0..6ac8d772f463 100644 --- a/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py +++ b/packages/google-cloud-run/google/cloud/run_v2/types/vendor_settings.py @@ -28,7 +28,9 @@ "VpcAccess", "BinaryAuthorization", "RevisionScaling", + "ServiceMesh", "ServiceScaling", + "NodeSelector", }, ) @@ -46,11 +48,14 @@ class IngressTraffic(proto.Enum): INGRESS_TRAFFIC_INTERNAL_LOAD_BALANCER (3): Both internal and Google Cloud Load Balancer traffic is allowed. + INGRESS_TRAFFIC_NONE (4): + No ingress traffic is allowed. """ INGRESS_TRAFFIC_UNSPECIFIED = 0 INGRESS_TRAFFIC_ALL = 1 INGRESS_TRAFFIC_INTERNAL_ONLY = 2 INGRESS_TRAFFIC_INTERNAL_LOAD_BALANCER = 3 + INGRESS_TRAFFIC_NONE = 4 class ExecutionEnvironment(proto.Enum): @@ -94,12 +99,11 @@ class VpcAccess(proto.Message): Attributes: connector (str): - VPC Access connector name. - Format: - projects/{project}/locations/{location}/connectors/{connector}, - where {project} can be project id or number. - For more information on sending traffic to a VPC - network via a connector, visit + VPC Access connector name. Format: + ``projects/{project}/locations/{location}/connectors/{connector}``, + where ``{project}`` can be project id or number. For more + information on sending traffic to a VPC network via a + connector, visit https://cloud.google.com/run/docs/configuring/vpc-connectors. egress (google.cloud.run_v2.types.VpcAccess.VpcEgress): Optional. Traffic VPC egress settings. 
If not provided, it @@ -201,9 +205,8 @@ class BinaryAuthorization(proto.Message): This field is a member of `oneof`_ ``binauthz_method``. policy (str): - Optional. The path to a binary authorization - policy. Format: - projects/{project}/platforms/cloudRun/{policy-name} + Optional. The path to a binary authorization policy. Format: + ``projects/{project}/platforms/cloudRun/{policy-name}`` This field is a member of `oneof`_ ``binauthz_method``. breakglass_justification (str): @@ -238,7 +241,11 @@ class RevisionScaling(proto.Message): that this resource should have. max_instance_count (int): Optional. Maximum number of serving instances - that this resource should have. + that this resource should have. When + unspecified, the field is set to the server + default value of + 100. For more information see + https://cloud.google.com/run/docs/configuring/max-instances """ min_instance_count: int = proto.Field( @@ -251,6 +258,23 @@ class RevisionScaling(proto.Message): ) +class ServiceMesh(proto.Message): + r"""Settings for Cloud Service Mesh. For more information see + https://cloud.google.com/service-mesh/docs/overview. + + Attributes: + mesh (str): + The Mesh resource name. Format: + ``projects/{project}/locations/global/meshes/{mesh}``, where + ``{project}`` can be project id or number. + """ + + mesh: str = proto.Field( + proto.STRING, + number=1, + ) + + class ServiceScaling(proto.Message): r"""Scaling settings applied at the service level rather than at the revision level. @@ -261,13 +285,52 @@ class ServiceScaling(proto.Message): service. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving. - (BETA) + scaling_mode (google.cloud.run_v2.types.ServiceScaling.ScalingMode): + Optional. The scaling mode for the service. """ + class ScalingMode(proto.Enum): + r"""The scaling mode for the service. If not provided, it + defaults to AUTOMATIC. + + Values: + SCALING_MODE_UNSPECIFIED (0): + Unspecified. + AUTOMATIC (1): + Scale based on traffic between min and max + instances. + MANUAL (2): + Scale to exactly min instances and ignore max + instances. + """ + SCALING_MODE_UNSPECIFIED = 0 + AUTOMATIC = 1 + MANUAL = 2 + min_instance_count: int = proto.Field( proto.INT32, number=1, ) + scaling_mode: ScalingMode = proto.Field( + proto.ENUM, + number=3, + enum=ScalingMode, + ) + + +class NodeSelector(proto.Message): + r"""Hardware constraints configuration. + + Attributes: + accelerator (str): + Required. GPU accelerator type to attach to + an instance. + """ + + accelerator: str = proto.Field( + proto.STRING, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py new file mode 100644 index 000000000000..a5a58398f493 --- /dev/null +++ b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-run + + +# [START run_v2_generated_Builds_SubmitBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import run_v2 + + +async def sample_submit_build(): + # Create a client + client = run_v2.BuildsAsyncClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = await client.submit_build(request=request) + + # Handle the response + print(response) + +# [END run_v2_generated_Builds_SubmitBuild_async] diff --git a/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py new file mode 100644 index 000000000000..17d11866d645 --- /dev/null +++ b/packages/google-cloud-run/samples/generated_samples/run_v2_generated_builds_submit_build_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-run + + +# [START run_v2_generated_Builds_SubmitBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import run_v2 + + +def sample_submit_build(): + # Create a client + client = run_v2.BuildsClient() + + # Initialize request argument(s) + storage_source = run_v2.StorageSource() + storage_source.bucket = "bucket_value" + storage_source.object_ = "object__value" + + request = run_v2.SubmitBuildRequest( + storage_source=storage_source, + parent="parent_value", + image_uri="image_uri_value", + ) + + # Make the request + response = client.submit_build(request=request) + + # Handle the response + print(response) + +# [END run_v2_generated_Builds_SubmitBuild_sync] diff --git a/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json b/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json index ae607aeec9de..84a8ca294789 100644 --- a/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json +++ b/packages/google-cloud-run/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json @@ -11,6 +11,159 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.run_v2.BuildsAsyncClient", + "shortName": "BuildsAsyncClient" + }, + "fullName": "google.cloud.run_v2.BuildsAsyncClient.submit_build", + "method": { + "fullName": "google.cloud.run.v2.Builds.SubmitBuild", + "service": { + "fullName": "google.cloud.run.v2.Builds", + "shortName": "Builds" + }, + "shortName": "SubmitBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.run_v2.types.SubmitBuildRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.run_v2.types.SubmitBuildResponse", + "shortName": "submit_build" + }, + "description": "Sample for SubmitBuild", + "file": "run_v2_generated_builds_submit_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "run_v2_generated_Builds_SubmitBuild_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "run_v2_generated_builds_submit_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.run_v2.BuildsClient", + "shortName": "BuildsClient" + }, + "fullName": "google.cloud.run_v2.BuildsClient.submit_build", + "method": { + "fullName": "google.cloud.run.v2.Builds.SubmitBuild", + "service": { + "fullName": "google.cloud.run.v2.Builds", + "shortName": "Builds" + }, + "shortName": "SubmitBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.run_v2.types.SubmitBuildRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.run_v2.types.SubmitBuildResponse", + "shortName": "submit_build" 
+ }, + "description": "Sample for SubmitBuild", + "file": "run_v2_generated_builds_submit_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "run_v2_generated_Builds_SubmitBuild_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "run_v2_generated_builds_submit_build_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py b/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py index 2b9c966dccff..84f0f0b13518 100644 --- a/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py +++ b/packages/google-cloud-run/scripts/fixup_run_v2_keywords.py @@ -59,6 +59,7 @@ class runCallTransformer(cst.CSTTransformer): 'list_tasks': ('parent', 'page_size', 'page_token', 'show_deleted', ), 'run_job': ('name', 'validate_only', 'etag', 'overrides', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'submit_build': ('parent', 'storage_source', 'image_uri', 'buildpack_build', 'docker_build', 'service_account', 'worker_pool', 'tags', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_job': ('job', 'validate_only', 'allow_missing', ), 'update_service': ('service', 'update_mask', 'validate_only', 'allow_missing', ), diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py new file mode 100644 index 000000000000..dd13d86db489 --- /dev/null +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_builds.py @@ -0,0 +1,3162 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.run_v2.services.builds import ( + BuildsAsyncClient, + BuildsClient, + transports, +) +from google.cloud.run_v2.types import build + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BuildsClient._get_default_mtls_endpoint(None) is None + assert BuildsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + BuildsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + BuildsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BuildsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert BuildsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert BuildsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BuildsClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BuildsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + BuildsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BuildsClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BuildsClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BuildsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BuildsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BuildsClient._read_environment_variables() == (False, "auto", "foo.com") + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BuildsClient._get_client_cert_source(None, False) is None + assert ( + BuildsClient._get_client_cert_source(mock_provided_cert_source, False) is None + ) + assert ( + BuildsClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BuildsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BuildsClient._get_client_cert_source(mock_provided_cert_source, 
"true") + is mock_provided_cert_source + ) + + +@mock.patch.object( + BuildsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsClient), +) +@mock.patch.object( + BuildsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BuildsClient._DEFAULT_UNIVERSE + default_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + BuildsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BuildsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BuildsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BuildsClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + BuildsClient._get_api_endpoint(None, None, default_universe, "always") + == BuildsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BuildsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BuildsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BuildsClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + BuildsClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + BuildsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + BuildsClient._get_universe_domain(client_universe_domain, universe_domain_env) + == client_universe_domain + ) + assert ( + BuildsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BuildsClient._get_universe_domain(None, None) == BuildsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + BuildsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc"), + (BuildsClient, transports.BuildsRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. 
Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BuildsClient, "grpc"), + (BuildsAsyncClient, "grpc_asyncio"), + (BuildsClient, "rest"), + ], +) +def test_builds_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "run.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://run.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BuildsGrpcTransport, "grpc"), + (transports.BuildsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BuildsRestTransport, "rest"), + ], +) +def test_builds_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BuildsClient, "grpc"), + (BuildsAsyncClient, "grpc_asyncio"), + (BuildsClient, "rest"), + ], +) +def test_builds_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "run.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://run.googleapis.com/" + ) + + +def test_builds_client_get_transport_class(): + transport = BuildsClient.get_transport_class() + available_transports = [ + transports.BuildsGrpcTransport, + transports.BuildsRestTransport, + ] + assert transport in available_transports + + transport = BuildsClient.get_transport_class("grpc") + assert transport == transports.BuildsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc"), + (BuildsAsyncClient, transports.BuildsGrpcAsyncIOTransport, "grpc_asyncio"), + (BuildsClient, transports.BuildsRestTransport, "rest"), + ], +) +@mock.patch.object( + BuildsClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsClient), +) +@mock.patch.object( + BuildsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsAsyncClient), +) +def test_builds_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BuildsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BuildsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc", "true"), + ( + BuildsAsyncClient, + transports.BuildsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (BuildsClient, transports.BuildsGrpcTransport, "grpc", "false"), + ( + BuildsAsyncClient, + transports.BuildsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (BuildsClient, transports.BuildsRestTransport, "rest", "true"), + (BuildsClient, transports.BuildsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + BuildsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsClient), +) +@mock.patch.object( + BuildsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_builds_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [BuildsClient, BuildsAsyncClient]) +@mock.patch.object( + BuildsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BuildsClient) +) +@mock.patch.object( + BuildsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BuildsAsyncClient) +) +def test_builds_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [BuildsClient, BuildsAsyncClient]) +@mock.patch.object( + BuildsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsClient), +) +@mock.patch.object( + BuildsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BuildsAsyncClient), +) +def test_builds_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BuildsClient._DEFAULT_UNIVERSE + default_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BuildsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc"), + (BuildsAsyncClient, transports.BuildsGrpcAsyncIOTransport, "grpc_asyncio"), + (BuildsClient, transports.BuildsRestTransport, "rest"), + ], +) +def test_builds_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc", grpc_helpers), + ( + BuildsAsyncClient, + transports.BuildsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (BuildsClient, transports.BuildsRestTransport, "rest", None), + ], +) +def test_builds_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_builds_client_client_options_from_dict(): + with mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BuildsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (BuildsClient, transports.BuildsGrpcTransport, "grpc", grpc_helpers), + ( + BuildsAsyncClient, + transports.BuildsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_builds_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "run.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="run.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + build.SubmitBuildRequest, + dict, + ], +) +def test_submit_build(request_type, transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = build.SubmitBuildResponse( + base_image_uri="base_image_uri_value", + base_image_warning="base_image_warning_value", + ) + response = client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = build.SubmitBuildRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, build.SubmitBuildResponse) + assert response.base_image_uri == "base_image_uri_value" + assert response.base_image_warning == "base_image_warning_value" + + +def test_submit_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.submit_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == build.SubmitBuildRequest() + + +def test_submit_build_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = build.SubmitBuildRequest( + parent="parent_value", + image_uri="image_uri_value", + service_account="service_account_value", + worker_pool="worker_pool_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.submit_build(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == build.SubmitBuildRequest( + parent="parent_value", + image_uri="image_uri_value", + service_account="service_account_value", + worker_pool="worker_pool_value", + ) + + +def test_submit_build_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_build in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.submit_build] = mock_rpc + request = {} + client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.submit_build(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_submit_build_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + build.SubmitBuildResponse( + base_image_uri="base_image_uri_value", + base_image_warning="base_image_warning_value", + ) + ) + response = await client.submit_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == build.SubmitBuildRequest() + + +@pytest.mark.asyncio +async def test_submit_build_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.submit_build + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.submit_build + ] = mock_rpc + + request = {} + await client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.submit_build(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_submit_build_async( + transport: str = "grpc_asyncio", request_type=build.SubmitBuildRequest +): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + build.SubmitBuildResponse( + base_image_uri="base_image_uri_value", + base_image_warning="base_image_warning_value", + ) + ) + response = await client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = build.SubmitBuildRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, build.SubmitBuildResponse) + assert response.base_image_uri == "base_image_uri_value" + assert response.base_image_warning == "base_image_warning_value" + + +@pytest.mark.asyncio +async def test_submit_build_async_from_dict(): + await test_submit_build_async(request_type=dict) + + +def test_submit_build_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = build.SubmitBuildRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + call.return_value = build.SubmitBuildResponse() + client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_build_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = build.SubmitBuildRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_build), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + build.SubmitBuildResponse() + ) + await client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + build.SubmitBuildRequest, + dict, + ], +) +def test_submit_build_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = build.SubmitBuildResponse( + base_image_uri="base_image_uri_value", + base_image_warning="base_image_warning_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = build.SubmitBuildResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.submit_build(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, build.SubmitBuildResponse) + assert response.base_image_uri == "base_image_uri_value" + assert response.base_image_warning == "base_image_warning_value" + + +def test_submit_build_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_build in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.submit_build] = mock_rpc + + request = {} + client.submit_build(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.submit_build(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_submit_build_rest_required_fields(request_type=build.SubmitBuildRequest): + transport_class = transports.BuildsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["image_uri"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["imageUri"] = "image_uri_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "imageUri" in jsonified_request + assert jsonified_request["imageUri"] == "image_uri_value" + + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = build.SubmitBuildResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = build.SubmitBuildResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.submit_build(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_submit_build_rest_unset_required_fields():
+    transport = transports.BuildsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.submit_build._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "storageSource",
+                "imageUri",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_submit_build_rest_interceptors(null_interceptor):
+    transport = transports.BuildsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.BuildsRestInterceptor(),
+    )
+    client = BuildsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.BuildsRestInterceptor, "post_submit_build"
+    ) as post, mock.patch.object(
+        transports.BuildsRestInterceptor, "pre_submit_build"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = build.SubmitBuildRequest.pb(build.SubmitBuildRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = build.SubmitBuildResponse.to_json(
+            build.SubmitBuildResponse()
+        )
+
+        request = build.SubmitBuildRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = build.SubmitBuildResponse()
+
+        client.submit_build(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_submit_build_rest_bad_request(
+    transport: str = "rest", request_type=build.SubmitBuildRequest
+):
+    client = BuildsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.submit_build(request) + + +def test_submit_build_rest_error(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BuildsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BuildsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BuildsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BuildsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BuildsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BuildsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BuildsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BuildsGrpcTransport, + transports.BuildsGrpcAsyncIOTransport, + transports.BuildsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = BuildsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BuildsGrpcTransport, + ) + + +def test_builds_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BuildsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_builds_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BuildsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "submit_build", + "get_operation", + "wait_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_builds_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BuildsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_builds_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.run_v2.services.builds.transports.BuildsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BuildsTransport() + adc.assert_called_once() + + +def test_builds_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BuildsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BuildsGrpcTransport, + transports.BuildsGrpcAsyncIOTransport, + ], +) +def test_builds_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BuildsGrpcTransport, + transports.BuildsGrpcAsyncIOTransport, + transports.BuildsRestTransport, + ], +) +def test_builds_transport_auth_gdch_credentials(transport_class): + host = "https://language.com/" + api_audience_tests = [None, "https://language2.com/"] + api_audience_expect = [host, "https://language2.com/"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BuildsGrpcTransport, grpc_helpers), + (transports.BuildsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_builds_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "run.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="run.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.BuildsGrpcTransport, transports.BuildsGrpcAsyncIOTransport], +) +def test_builds_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_builds_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BuildsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_builds_host_no_port(transport_name): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="run.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "run.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://run.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_builds_host_with_port(transport_name): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="run.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "run.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://run.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_builds_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BuildsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BuildsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.submit_build._session + session2 = client2.transport.submit_build._session + assert session1 != session2 + + +def test_builds_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.BuildsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_builds_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.BuildsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.BuildsGrpcTransport, transports.BuildsGrpcAsyncIOTransport],
+)
+def test_builds_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.BuildsGrpcTransport, transports.BuildsGrpcAsyncIOTransport], +) +def test_builds_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_worker_pool_path(): + project = "squid" + location = "clam" + worker_pool = "whelk" + expected = ( + "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( + project=project, + location=location, + worker_pool=worker_pool, + ) + ) + actual = BuildsClient.worker_pool_path(project, location, worker_pool) + assert expected == actual + + +def test_parse_worker_pool_path(): + expected = { + "project": "octopus", + "location": "oyster", + "worker_pool": "nudibranch", + } + path = BuildsClient.worker_pool_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_worker_pool_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BuildsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = BuildsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BuildsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = BuildsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BuildsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = BuildsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BuildsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = BuildsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = BuildsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BuildsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = BuildsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BuildsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BuildsTransport, "_prep_wrapped_messages" + ) as prep: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BuildsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BuildsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_wait_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.WaitOperationRequest +): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.WaitOperationRequest, + dict, + ], +) +def test_wait_operation_rest(request_type): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_delete_operation(transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_wait_operation(transport: str = "grpc"):
+    client = BuildsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.WaitOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+
+@pytest.mark.asyncio
+async def test_wait_operation_async(transport: str = "grpc_asyncio"):
+    client = BuildsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.WaitOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+
+def test_wait_operation_field_headers():
+    client = BuildsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.WaitOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_wait_operation_field_headers_async():
+    client = BuildsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_wait_operation_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_wait_operation_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BuildsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BuildsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BuildsClient, transports.BuildsGrpcTransport), + (BuildsAsyncClient, transports.BuildsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py index 11755c4fb3c4..7d6702058641 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_jobs.py @@ -8029,9 +8029,29 @@ def test_parse_job_path(): assert expected == actual -def test_secret_path(): +def test_policy_path(): project = "oyster" - secret = "nudibranch" + expected = "projects/{project}/policy".format( + project=project, + ) + actual = JobsClient.policy_path(project) + assert expected == actual + + +def test_parse_policy_path(): + expected = { + "project": "nudibranch", + } + path = JobsClient.policy_path(**expected) + + # Check that the path construction is reversible. 
+ actual = JobsClient.parse_policy_path(path) + assert expected == actual + + +def test_secret_path(): + project = "cuttlefish" + secret = "mussel" expected = "projects/{project}/secrets/{secret}".format( project=project, secret=secret, @@ -8042,8 +8062,8 @@ def test_secret_path(): def test_parse_secret_path(): expected = { - "project": "cuttlefish", - "secret": "mussel", + "project": "winkle", + "secret": "nautilus", } path = JobsClient.secret_path(**expected) @@ -8053,9 +8073,9 @@ def test_parse_secret_path(): def test_secret_version_path(): - project = "winkle" - secret = "nautilus" - version = "scallop" + project = "scallop" + secret = "abalone" + version = "squid" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -8067,9 +8087,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "abalone", - "secret": "squid", - "version": "clam", + "project": "clam", + "secret": "whelk", + "version": "octopus", } path = JobsClient.secret_version_path(**expected) @@ -8079,7 +8099,7 @@ def test_parse_secret_version_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8089,7 +8109,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nudibranch", } path = JobsClient.common_billing_account_path(**expected) @@ -8099,7 +8119,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8109,7 +8129,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "mussel", } path = JobsClient.common_folder_path(**expected) @@ -8119,7 +8139,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8129,7 +8149,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nautilus", } path = JobsClient.common_organization_path(**expected) @@ -8139,7 +8159,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8149,7 +8169,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "abalone", } path = JobsClient.common_project_path(**expected) @@ -8159,8 +8179,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -8171,8 +8191,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "whelk", + "location": "octopus", } path = JobsClient.common_location_path(**expected) diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py index e01cb1c29f93..04d6b89fa111 100644 
--- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_revisions.py @@ -4031,11 +4031,37 @@ def test_parse_crypto_key_path(): assert expected == actual -def test_revision_path(): +def test_mesh_path(): project = "whelk" location = "octopus" - service = "oyster" - revision = "nudibranch" + mesh = "oyster" + expected = "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + ) + actual = RevisionsClient.mesh_path(project, location, mesh) + assert expected == actual + + +def test_parse_mesh_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "mesh": "mussel", + } + path = RevisionsClient.mesh_path(**expected) + + # Check that the path construction is reversible. + actual = RevisionsClient.parse_mesh_path(path) + assert expected == actual + + +def test_revision_path(): + project = "winkle" + location = "nautilus" + service = "scallop" + revision = "abalone" expected = "projects/{project}/locations/{location}/services/{service}/revisions/{revision}".format( project=project, location=location, @@ -4048,10 +4074,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "service": "winkle", - "revision": "nautilus", + "project": "squid", + "location": "clam", + "service": "whelk", + "revision": "octopus", } path = RevisionsClient.revision_path(**expected) @@ -4061,8 +4087,8 @@ def test_parse_revision_path(): def test_secret_path(): - project = "scallop" - secret = "abalone" + project = "oyster" + secret = "nudibranch" expected = "projects/{project}/secrets/{secret}".format( project=project, secret=secret, @@ -4073,8 +4099,8 @@ def test_secret_path(): def test_parse_secret_path(): expected = { - "project": "squid", - "secret": "clam", + "project": "cuttlefish", + "secret": "mussel", } path = RevisionsClient.secret_path(**expected) @@ -4084,9 +4110,9 @@ def test_parse_secret_path(): def test_secret_version_path(): - project = "whelk" - secret = "octopus" - version = "oyster" + project = "winkle" + secret = "nautilus" + version = "scallop" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -4098,9 +4124,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "nudibranch", - "secret": "cuttlefish", - "version": "mussel", + "project": "abalone", + "secret": "squid", + "version": "clam", } path = RevisionsClient.secret_version_path(**expected) @@ -4110,9 +4136,9 @@ def test_parse_secret_version_path(): def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" + project = "whelk" + location = "octopus" + service = "oyster" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -4124,9 +4150,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "abalone", - "location": "squid", - "service": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "service": "mussel", } path = RevisionsClient.service_path(**expected) @@ -4136,7 +4162,7 @@ def test_parse_service_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4146,7 +4172,7 @@ def test_common_billing_account_path(): def 
test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nautilus", } path = RevisionsClient.common_billing_account_path(**expected) @@ -4156,7 +4182,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -4166,7 +4192,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "abalone", } path = RevisionsClient.common_folder_path(**expected) @@ -4176,7 +4202,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -4186,7 +4212,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "clam", } path = RevisionsClient.common_organization_path(**expected) @@ -4196,7 +4222,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -4206,7 +4232,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "octopus", } path = RevisionsClient.common_project_path(**expected) @@ -4216,8 +4242,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4228,8 +4254,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "cuttlefish", + "location": "mussel", } path = RevisionsClient.common_location_path(**expected) diff --git a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py index af0363c22fdb..e2de63b49bc9 100644 --- a/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py +++ b/packages/google-cloud-run/tests/unit/gapic/run_v2/test_services.py @@ -1468,12 +1468,14 @@ def test_get_service(request_type, transport: str = "grpc"): client_version="client_version_value", ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -1498,12 +1500,14 @@ def test_get_service(request_type, transport: str = "grpc"): assert response.client_version == "client_version_value" assert response.ingress == vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.invoker_iam_disabled is True assert response.default_uri_disabled is True + assert response.urls == ["urls_value"] + assert response.custom_audiences == ["custom_audiences_value"] assert response.observed_generation == 2021 assert response.latest_ready_revision == 
"latest_ready_revision_value" assert response.latest_created_revision == "latest_created_revision_value" assert response.uri == "uri_value" - assert response.custom_audiences == ["custom_audiences_value"] assert response.satisfies_pzs is True assert response.reconciling is True assert response.etag == "etag_value" @@ -1615,12 +1619,14 @@ async def test_get_service_empty_call_async(): client_version="client_version_value", ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -1702,12 +1708,14 @@ async def test_get_service_async( client_version="client_version_value", ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -1733,12 +1741,14 @@ async def test_get_service_async( assert response.client_version == "client_version_value" assert response.ingress == vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.invoker_iam_disabled is True assert response.default_uri_disabled is True + assert response.urls == ["urls_value"] + assert response.custom_audiences == ["custom_audiences_value"] assert response.observed_generation == 2021 assert response.latest_ready_revision == "latest_ready_revision_value" assert response.latest_created_revision == "latest_created_revision_value" assert response.uri == "uri_value" - assert response.custom_audiences == ["custom_audiences_value"] assert response.satisfies_pzs is True assert response.reconciling is True assert response.etag == "etag_value" @@ -4109,8 +4119,10 @@ def test_create_service_rest(request_type): "execution_environment": 1, "encryption_key": "encryption_key_value", "max_instance_request_concurrency": 3436, + "service_mesh": {"mesh": "mesh_value"}, "session_affinity": True, "health_check_disabled": True, + "node_selector": {"accelerator": "accelerator_value"}, }, "traffic": [ { @@ -4120,8 +4132,11 @@ def test_create_service_rest(request_type): "tag": "tag_value", } ], - "scaling": {"min_instance_count": 1920}, + "scaling": {"min_instance_count": 1920, "scaling_mode": 1}, + "invoker_iam_disabled": True, "default_uri_disabled": True, + "urls": ["urls_value1", "urls_value2"], + "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "observed_generation": 2021, "terminal_condition": { "type_": "type__value", @@ -4146,7 +4161,6 @@ def test_create_service_rest(request_type): } ], "uri": "uri_value", - "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "satisfies_pzs": True, "reconciling": True, "etag": "etag_value", @@ -4569,12 +4583,14 @@ def test_get_service_rest(request_type): client_version="client_version_value", 
ingress=vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL, launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + invoker_iam_disabled=True, default_uri_disabled=True, + urls=["urls_value"], + custom_audiences=["custom_audiences_value"], observed_generation=2021, latest_ready_revision="latest_ready_revision_value", latest_created_revision="latest_created_revision_value", uri="uri_value", - custom_audiences=["custom_audiences_value"], satisfies_pzs=True, reconciling=True, etag="etag_value", @@ -4603,12 +4619,14 @@ def test_get_service_rest(request_type): assert response.client_version == "client_version_value" assert response.ingress == vendor_settings.IngressTraffic.INGRESS_TRAFFIC_ALL assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.invoker_iam_disabled is True assert response.default_uri_disabled is True + assert response.urls == ["urls_value"] + assert response.custom_audiences == ["custom_audiences_value"] assert response.observed_generation == 2021 assert response.latest_ready_revision == "latest_ready_revision_value" assert response.latest_created_revision == "latest_created_revision_value" assert response.uri == "uri_value" - assert response.custom_audiences == ["custom_audiences_value"] assert response.satisfies_pzs is True assert response.reconciling is True assert response.etag == "etag_value" @@ -5369,8 +5387,10 @@ def test_update_service_rest(request_type): "execution_environment": 1, "encryption_key": "encryption_key_value", "max_instance_request_concurrency": 3436, + "service_mesh": {"mesh": "mesh_value"}, "session_affinity": True, "health_check_disabled": True, + "node_selector": {"accelerator": "accelerator_value"}, }, "traffic": [ { @@ -5380,8 +5400,11 @@ def test_update_service_rest(request_type): "tag": "tag_value", } ], - "scaling": {"min_instance_count": 1920}, + "scaling": {"min_instance_count": 1920, "scaling_mode": 1}, + "invoker_iam_disabled": True, "default_uri_disabled": True, + "urls": ["urls_value1", "urls_value2"], + "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "observed_generation": 2021, "terminal_condition": { "type_": "type__value", @@ -5406,7 +5429,6 @@ def test_update_service_rest(request_type): } ], "uri": "uri_value", - "custom_audiences": ["custom_audiences_value1", "custom_audiences_value2"], "satisfies_pzs": True, "reconciling": True, "etag": "etag_value", @@ -7487,11 +7509,57 @@ def test_parse_crypto_key_path(): assert expected == actual -def test_revision_path(): +def test_mesh_path(): project = "whelk" location = "octopus" - service = "oyster" - revision = "nudibranch" + mesh = "oyster" + expected = "projects/{project}/locations/{location}/meshes/{mesh}".format( + project=project, + location=location, + mesh=mesh, + ) + actual = ServicesClient.mesh_path(project, location, mesh) + assert expected == actual + + +def test_parse_mesh_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "mesh": "mussel", + } + path = ServicesClient.mesh_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServicesClient.parse_mesh_path(path) + assert expected == actual + + +def test_policy_path(): + project = "winkle" + expected = "projects/{project}/policy".format( + project=project, + ) + actual = ServicesClient.policy_path(project) + assert expected == actual + + +def test_parse_policy_path(): + expected = { + "project": "nautilus", + } + path = ServicesClient.policy_path(**expected) + + # Check that the path construction is reversible. + actual = ServicesClient.parse_policy_path(path) + assert expected == actual + + +def test_revision_path(): + project = "scallop" + location = "abalone" + service = "squid" + revision = "clam" expected = "projects/{project}/locations/{location}/services/{service}/revisions/{revision}".format( project=project, location=location, @@ -7504,10 +7572,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "service": "winkle", - "revision": "nautilus", + "project": "whelk", + "location": "octopus", + "service": "oyster", + "revision": "nudibranch", } path = ServicesClient.revision_path(**expected) @@ -7517,8 +7585,8 @@ def test_parse_revision_path(): def test_secret_path(): - project = "scallop" - secret = "abalone" + project = "cuttlefish" + secret = "mussel" expected = "projects/{project}/secrets/{secret}".format( project=project, secret=secret, @@ -7529,8 +7597,8 @@ def test_secret_path(): def test_parse_secret_path(): expected = { - "project": "squid", - "secret": "clam", + "project": "winkle", + "secret": "nautilus", } path = ServicesClient.secret_path(**expected) @@ -7540,9 +7608,9 @@ def test_parse_secret_path(): def test_secret_version_path(): - project = "whelk" - secret = "octopus" - version = "oyster" + project = "scallop" + secret = "abalone" + version = "squid" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -7554,9 +7622,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "nudibranch", - "secret": "cuttlefish", - "version": "mussel", + "project": "clam", + "secret": "whelk", + "version": "octopus", } path = ServicesClient.secret_version_path(**expected) @@ -7566,9 +7634,9 @@ def test_parse_secret_version_path(): def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -7580,9 +7648,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "abalone", - "location": "squid", - "service": "clam", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = ServicesClient.service_path(**expected) @@ -7592,7 +7660,7 @@ def test_parse_service_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -7602,7 +7670,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "abalone", } path = ServicesClient.common_billing_account_path(**expected) @@ -7612,7 +7680,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -7622,7 +7690,7 @@ 
def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "clam", } path = ServicesClient.common_folder_path(**expected) @@ -7632,7 +7700,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -7642,7 +7710,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "octopus", } path = ServicesClient.common_organization_path(**expected) @@ -7652,7 +7720,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -7662,7 +7730,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "nudibranch", } path = ServicesClient.common_project_path(**expected) @@ -7672,8 +7740,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -7684,8 +7752,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "winkle", + "location": "nautilus", } path = ServicesClient.common_location_path(**expected) From 023d09955a2b4e013a3506d2dbed45c3e7e4a696 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:09:14 -0400 Subject: [PATCH 090/108] docs: [google-cloud-batch] Clarify Batch only supports global custom instance template now (#13117) BEGIN_COMMIT_OVERRIDE docs: Clarify Batch only supports global custom instance template now END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 680722756 Source-Link: https://github.com/googleapis/googleapis/commit/42f7085c6332271d9b4d95c77ce3c9d5d0509cfc Source-Link: https://github.com/googleapis/googleapis-gen/commit/8aafe35f7fb64040f3f2eb79b46164250bec8483 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhdGNoLy5Pd2xCb3QueWFtbCIsImgiOiI4YWFmZTM1ZjdmYjY0MDQwZjNmMmViNzliNDYxNjQyNTBiZWM4NDgzIn0= --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google-cloud-batch/google/cloud/batch/gapic_version.py | 2 +- .../google/cloud/batch_v1/gapic_version.py | 2 +- .../google-cloud-batch/google/cloud/batch_v1/types/job.py | 6 +++++- .../google/cloud/batch_v1alpha/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.batch.v1.json | 2 +- .../snippet_metadata_google.cloud.batch.v1alpha.json | 2 +- 6 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 5f7f6c52ce54..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.28" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 5f7f6c52ce54..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.28" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index c11a34f16b56..a5aca6e5d1bc 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -792,7 +792,11 @@ class InstancePolicyOrTemplate(proto.Message): instance_template (str): Name of an instance template used to create VMs. Named the field as 'instance_template' instead of 'template' to avoid - c++ keyword conflict. + C++ keyword conflict. + + Batch only supports global instance templates. You can + specify the global instance template as a full or partial + URL. This field is a member of `oneof`_ ``policy_template``. install_gpu_drivers (bool): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 5f7f6c52ce54..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.28" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 4aeac54efe09..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.28" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 88b47050fed2..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.28" + "version": "0.1.0" }, "snippets": [ { From c1693486f314261e3799547ee6f5e53dd7e687fc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:15:11 -0400 Subject: [PATCH 091/108] feat: [google-cloud-dialogflow] created new boolean fields in conversation dataset for zone isolation and zone separation compliance status (#13107) BEGIN_COMMIT_OVERRIDE feat: created new boolean fields in conversation dataset for zone isolation and zone separation compliance status feat: add ALAW encoding value to Audio encoding enum END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
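For illustration, a minimal sketch (not part of the generated diff) of how the additions described in this commit could be exercised from Python. It assumes application default credentials, placeholder resource names, and the conventional generated surface of dialogflow_v2 (a ConversationDatasetsClient with a flattened name argument); the string form of the enum value mirrors the regenerated samples further down:

    from google.cloud import dialogflow_v2

    # Read the new zone isolation / zone separation compliance booleans
    # exposed on ConversationDataset (satisfies_pzi / satisfies_pzs).
    datasets_client = dialogflow_v2.ConversationDatasetsClient()
    dataset = datasets_client.get_conversation_dataset(
        # Placeholder resource name, for illustration only.
        name="projects/my-project/locations/us-central1/conversationDatasets/my-dataset",
    )
    print(dataset.satisfies_pzi, dataset.satisfies_pzs)

    # Configure input audio with the new ALAW encoding value, passed by
    # name exactly as in the regenerated snippets in this commit.
    audio_config = dialogflow_v2.InputAudioConfig(
        audio_encoding="AUDIO_ENCODING_ALAW",
        sample_rate_hertz=8000,
        language_code="en-US",
    )
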
feat: add ALAW encoding value to Audio encoding enum PiperOrigin-RevId: 678636701 Source-Link: https://github.com/googleapis/googleapis/commit/0ede901c455762b9d55ea3cf386f50663d0650ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/39c42782febc92124134995b2e7d78be762bcc22 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpYWxvZ2Zsb3cvLk93bEJvdC55YW1sIiwiaCI6IjM5YzQyNzgyZmViYzkyMTI0MTM0OTk1YjJlN2Q3OGJlNzYyYmNjMjIifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/dialogflow/gapic_version.py | 2 +- .../cloud/dialogflow_v2/gapic_version.py | 2 +- .../services/participants/async_client.py | 2 +- .../services/participants/client.py | 2 +- .../services/sessions/async_client.py | 4 ++-- .../dialogflow_v2/services/sessions/client.py | 4 ++-- .../cloud/dialogflow_v2/types/audio_config.py | 8 +++++++ .../types/conversation_dataset.py | 24 +++++++++++++++++++ .../cloud/dialogflow_v2beta1/gapic_version.py | 2 +- ...cipants_streaming_analyze_content_async.py | 2 +- ...icipants_streaming_analyze_content_sync.py | 2 +- ..._generated_sessions_detect_intent_async.py | 2 +- ...2_generated_sessions_detect_intent_sync.py | 2 +- ..._sessions_streaming_detect_intent_async.py | 2 +- ...d_sessions_streaming_detect_intent_sync.py | 2 +- ...t_metadata_google.cloud.dialogflow.v2.json | 2 +- ...adata_google.cloud.dialogflow.v2beta1.json | 2 +- .../test_conversation_datasets.py | 16 +++++++++++++ 18 files changed, 65 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py index 43f7da028296..d1cfd3548f4a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py @@ -913,7 +913,7 @@ async def sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py index dcd520b9ac4e..57bcaccfbc20 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py @@ -1409,7 +1409,7 @@ def sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py index fd60fadf2543..6b70ea5b5d22 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/async_client.py @@ -309,7 +309,7 @@ async def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" @@ -472,7 +472,7 @@ async def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py index ad4dec9d8b41..37a879d61bd5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py @@ -784,7 +784,7 @@ def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = 
"AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" @@ -944,7 +944,7 @@ def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py index 200f60a0c406..2c2c2307a292 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py @@ -150,6 +150,9 @@ class AudioEncoding(proto.Enum): 5574. In other words, each RTP header is replaced with a single byte containing the block length. Only Speex wideband is supported. ``sample_rate_hertz`` must be 16000. + AUDIO_ENCODING_ALAW (8): + 8-bit samples that compand 13-bit audio + samples using G.711 PCMU/a-law. """ AUDIO_ENCODING_UNSPECIFIED = 0 AUDIO_ENCODING_LINEAR_16 = 1 @@ -159,6 +162,7 @@ class AudioEncoding(proto.Enum): AUDIO_ENCODING_AMR_WB = 5 AUDIO_ENCODING_OGG_OPUS = 6 AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE = 7 + AUDIO_ENCODING_ALAW = 8 class SpeechModelVariant(proto.Enum): @@ -258,6 +262,9 @@ class OutputAudioEncoding(proto.Enum): OUTPUT_AUDIO_ENCODING_MULAW (5): 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + OUTPUT_AUDIO_ENCODING_ALAW (6): + 8-bit samples that compand 13-bit audio + samples using G.711 PCMU/a-law. """ OUTPUT_AUDIO_ENCODING_UNSPECIFIED = 0 OUTPUT_AUDIO_ENCODING_LINEAR_16 = 1 @@ -265,6 +272,7 @@ class OutputAudioEncoding(proto.Enum): OUTPUT_AUDIO_ENCODING_MP3_64_KBPS = 4 OUTPUT_AUDIO_ENCODING_OGG_OPUS = 3 OUTPUT_AUDIO_ENCODING_MULAW = 5 + OUTPUT_AUDIO_ENCODING_ALAW = 6 class SpeechContext(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py index 788094e54596..19ba30433068 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_dataset.py @@ -91,6 +91,9 @@ class ConversationDataset(proto.Message): ImportConversationData on a dataset that already has data is not allowed). + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Output only. ConversationDataset resource name. Format: @@ -112,6 +115,17 @@ class ConversationDataset(proto.Message): conversation_count (int): Output only. The number of conversations this conversation dataset contains. + satisfies_pzi (bool): + Output only. A read only boolean field + reflecting Zone Isolation status of the dataset. + + This field is a member of `oneof`_ ``_satisfies_pzi``. + satisfies_pzs (bool): + Output only. A read only boolean field + reflecting Zone Separation status of the + dataset. + + This field is a member of `oneof`_ ``_satisfies_pzs``. 
""" name: str = proto.Field( @@ -145,6 +159,16 @@ class ConversationDataset(proto.Message): proto.INT64, number=7, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=9, + optional=True, + ) class CreateConversationDatasetRequest(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py index 014ecaa28d7c..17db60fde096 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py @@ -40,7 +40,7 @@ async def sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py index b99cb62d1737..a2a254ea096b 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py @@ -40,7 +40,7 @@ def sample_streaming_analyze_content(): # Initialize request argument(s) audio_config = dialogflow_v2.InputAudioConfig() - audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" audio_config.sample_rate_hertz = 1817 audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py index 3e398b3f4df6..2fa858e2bd9f 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py @@ -40,7 +40,7 @@ async def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = 
"AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py index 297e7a9332f5..836cee509472 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py @@ -40,7 +40,7 @@ def sample_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py index 540b21d4a829..a0b52da11775 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py @@ -40,7 +40,7 @@ async def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py index 34b9599f4d5a..6b154fd948c9 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py @@ -40,7 +40,7 @@ def sample_streaming_detect_intent(): # Initialize request argument(s) query_input = dialogflow_v2.QueryInput() - query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_ALAW" query_input.audio_config.sample_rate_hertz = 1817 query_input.audio_config.language_code = "language_code_value" diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json index 7e99cd1321e2..dde14d384e60 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -8,7 
+8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json index a9752b2203e1..58a96bc185e8 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py index e5c420409ba5..9b288c8fa5a9 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py @@ -1641,6 +1641,8 @@ def test_get_conversation_dataset(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) response = client.get_conversation_dataset(request) @@ -1656,6 +1658,8 @@ def test_get_conversation_dataset(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.conversation_count == 1955 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True def test_get_conversation_dataset_empty_call(): @@ -1769,6 +1773,8 @@ async def test_get_conversation_dataset_empty_call_async(): display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) ) response = await client.get_conversation_dataset() @@ -1844,6 +1850,8 @@ async def test_get_conversation_dataset_async( display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) ) response = await client.get_conversation_dataset(request) @@ -1860,6 +1868,8 @@ async def test_get_conversation_dataset_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.conversation_count == 1955 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True @pytest.mark.asyncio @@ -3336,6 +3346,8 @@ def test_create_conversation_dataset_rest(request_type): "input_config": {"gcs_source": {"uris": ["uris_value1", "uris_value2"]}}, "conversation_info": {"language_code": "language_code_value"}, "conversation_count": 1955, + "satisfies_pzi": True, + "satisfies_pzs": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -3740,6 +3752,8 @@ def test_get_conversation_dataset_rest(request_type): display_name="display_name_value", description="description_value", conversation_count=1955, + satisfies_pzi=True, + satisfies_pzs=True, ) # Wrap the value into a proper Response obj @@ -3759,6 +3773,8 @@ def test_get_conversation_dataset_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.conversation_count == 1955 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True def test_get_conversation_dataset_rest_use_cached_wrapped_rpc(): From 7f9bc3a7a504956eaf6eff5b80d77a15eda9e0b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:20:16 -0400 Subject: [PATCH 092/108] docs: [google-cloud-parallelstore] minor documentation formatting fix for Parallelstore (#13109) BEGIN_COMMIT_OVERRIDE docs: minor documentation formatting fix for Parallelstore feat: adding v1 version of our api docs: cleanup of Parallelstore API descriptions feat: add UPGRADING state to Parallelstore state BEGIN_PUBLIC_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 681948084 Source-Link: https://github.com/googleapis/googleapis/commit/3708fdf26f073ba5cb83c3f3a3778f701c80458d Source-Link: https://github.com/googleapis/googleapis-gen/commit/1a535dedd7a34b71f1aa1afdcb9f458594772c60 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6IjFhNTM1ZGVkZDdhMzRiNzFmMWFhMWFmZGNiOWY0NTg1OTQ3NzJjNjAifQ== BEGIN_NESTED_COMMIT docs: [google-cloud-parallelstore] minor documentation formatting fix for Parallelstore PiperOrigin-RevId: 681538037 Source-Link: https://github.com/googleapis/googleapis/commit/917e347c536bec36446ac434455da0cb2e7ddf46 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ed8a66ee826a7aaa27450e2d533eb325cb31b77f Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6ImVkOGE2NmVlODI2YTdhYWEyNzQ1MGUyZDUzM2ViMzI1Y2IzMWI3N2YifQ== END_NESTED_COMMIT BEGIN_NESTED_COMMIT feat: [google-cloud-parallelstore] adding v1 version of our api PiperOrigin-RevId: 681144478 Source-Link: https://github.com/googleapis/googleapis/commit/692cc30fdef961552861625f45f097f576679e86 Source-Link: https://github.com/googleapis/googleapis-gen/commit/77b92bf618cc25a1ddce15413b481020d87a43d8 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6Ijc3YjkyYmY2MThjYzI1YTFkZGNlMTU0MTNiNDgxMDIwZDg3YTQzZDgifQ== END_NESTED_COMMIT BEGIN_NESTED_COMMIT docs: [google-cloud-parallelstore] cleanup of Parallelstore API descriptions feat: add UPGRADING state to Parallelstore state PiperOrigin-RevId: 678758858 Source-Link: https://github.com/googleapis/googleapis/commit/6125b3deffdaf8d23af5a99a853b2a9a13b407d2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8f63dd4523066a23d77be22802232a20a20f6281 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXBhcmFsbGVsc3RvcmUvLk93bEJvdC55YW1sIiwiaCI6IjhmNjNkZDQ1MjMwNjZhMjNkNzdiZTIyODAyMjMyYTIwYTIwZjYyODEifQ== END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google-cloud-parallelstore/docs/index.rst | 11 + .../docs/parallelstore_v1/parallelstore.rst | 10 + .../docs/parallelstore_v1/services_.rst | 6 + .../docs/parallelstore_v1/types_.rst | 6 + .../google/cloud/parallelstore_v1/__init__.py | 74 + 
.../parallelstore_v1/gapic_metadata.json | 133 + .../cloud/parallelstore_v1/gapic_version.py | 16 + .../google/cloud/parallelstore_v1/py.typed | 2 + .../parallelstore_v1/services/__init__.py | 15 + .../services/parallelstore/__init__.py | 22 + .../services/parallelstore/async_client.py | 1498 +++ .../services/parallelstore/client.py | 1960 ++++ .../services/parallelstore/pagers.py | 193 + .../parallelstore/transports/__init__.py | 36 + .../services/parallelstore/transports/base.py | 309 + .../services/parallelstore/transports/grpc.py | 573 ++ .../parallelstore/transports/grpc_asyncio.py | 628 ++ .../services/parallelstore/transports/rest.py | 1696 ++++ .../cloud/parallelstore_v1/types/__init__.py | 66 + .../parallelstore_v1/types/parallelstore.py | 1063 +++ .../services/parallelstore/async_client.py | 74 +- .../services/parallelstore/client.py | 74 +- .../services/parallelstore/transports/grpc.py | 14 +- .../parallelstore/transports/grpc_asyncio.py | 14 +- .../services/parallelstore/transports/rest.py | 26 +- .../types/parallelstore.py | 244 +- ...ted_parallelstore_create_instance_async.py | 61 + ...ated_parallelstore_create_instance_sync.py | 61 + ...ted_parallelstore_delete_instance_async.py | 56 + ...ated_parallelstore_delete_instance_sync.py | 56 + ...nerated_parallelstore_export_data_async.py | 60 + ...enerated_parallelstore_export_data_sync.py | 60 + ...erated_parallelstore_get_instance_async.py | 52 + ...nerated_parallelstore_get_instance_sync.py | 52 + ...nerated_parallelstore_import_data_async.py | 60 + ...enerated_parallelstore_import_data_sync.py | 60 + ...ated_parallelstore_list_instances_async.py | 53 + ...rated_parallelstore_list_instances_sync.py | 53 + ...ted_parallelstore_update_instance_async.py | 59 + ...ated_parallelstore_update_instance_sync.py | 59 + ...etadata_google.cloud.parallelstore.v1.json | 1150 +++ .../fixup_parallelstore_v1_keywords.py | 182 + .../unit/gapic/parallelstore_v1/__init__.py | 15 + .../parallelstore_v1/test_parallelstore.py | 8278 +++++++++++++++++ 44 files changed, 18950 insertions(+), 240 deletions(-) create mode 100644 packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst create mode 100644 packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst create mode 100644 packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py create mode 100644 
packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py create mode 100644 packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py create mode 100644 packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json create mode 100644 packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py create mode 100644 packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py create mode 100644 packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py diff --git a/packages/google-cloud-parallelstore/docs/index.rst b/packages/google-cloud-parallelstore/docs/index.rst index acda35a132db..2711251a6855 100644 --- a/packages/google-cloud-parallelstore/docs/index.rst +++ b/packages/google-cloud-parallelstore/docs/index.rst @@ -2,6 +2,9 @@ .. 
include:: multiprocessing.rst +This package includes clients for multiple versions of Parallelstore API. +By default, you will get version ``parallelstore_v1beta``. + API Reference ------------- @@ -11,6 +14,14 @@ API Reference parallelstore_v1beta/services_ parallelstore_v1beta/types_ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + parallelstore_v1/services_ + parallelstore_v1/types_ + Changelog --------- diff --git a/packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst b/packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst new file mode 100644 index 000000000000..cee322fb4453 --- /dev/null +++ b/packages/google-cloud-parallelstore/docs/parallelstore_v1/parallelstore.rst @@ -0,0 +1,10 @@ +Parallelstore +------------------------------- + +.. automodule:: google.cloud.parallelstore_v1.services.parallelstore + :members: + :inherited-members: + +.. automodule:: google.cloud.parallelstore_v1.services.parallelstore.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst b/packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst new file mode 100644 index 000000000000..70ee7e4829c2 --- /dev/null +++ b/packages/google-cloud-parallelstore/docs/parallelstore_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Parallelstore v1 API +============================================== +.. toctree:: + :maxdepth: 2 + + parallelstore diff --git a/packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst b/packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst new file mode 100644 index 000000000000..2353120e572c --- /dev/null +++ b/packages/google-cloud-parallelstore/docs/parallelstore_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Parallelstore v1 API +=========================================== + +.. automodule:: google.cloud.parallelstore_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py new file mode 100644 index 000000000000..0300bbd36217 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.parallelstore_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.parallelstore import ParallelstoreAsyncClient, ParallelstoreClient +from .types.parallelstore import ( + CreateInstanceRequest, + DeleteInstanceRequest, + DestinationGcsBucket, + DestinationParallelstore, + DirectoryStripeLevel, + ExportDataMetadata, + ExportDataRequest, + ExportDataResponse, + FileStripeLevel, + GetInstanceRequest, + ImportDataMetadata, + ImportDataRequest, + ImportDataResponse, + Instance, + ListInstancesRequest, + ListInstancesResponse, + OperationMetadata, + SourceGcsBucket, + SourceParallelstore, + TransferCounters, + TransferOperationMetadata, + TransferType, + UpdateInstanceRequest, +) + +__all__ = ( + "ParallelstoreAsyncClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DestinationGcsBucket", + "DestinationParallelstore", + "DirectoryStripeLevel", + "ExportDataMetadata", + "ExportDataRequest", + "ExportDataResponse", + "FileStripeLevel", + "GetInstanceRequest", + "ImportDataMetadata", + "ImportDataRequest", + "ImportDataResponse", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "OperationMetadata", + "ParallelstoreClient", + "SourceGcsBucket", + "SourceParallelstore", + "TransferCounters", + "TransferOperationMetadata", + "TransferType", + "UpdateInstanceRequest", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json new file mode 100644 index 000000000000..b7dffed6e226 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.parallelstore_v1", + "protoPackage": "google.cloud.parallelstore.v1", + "schema": "1.0", + "services": { + "Parallelstore": { + "clients": { + "grpc": { + "libraryClient": "ParallelstoreClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportData": { + "methods": [ + "export_data" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportData": { + "methods": [ + "import_data" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ParallelstoreAsyncClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportData": { + "methods": [ + "export_data" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportData": { + "methods": [ + "import_data" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "rest": { + "libraryClient": "ParallelstoreClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportData": { + "methods": [ + "export_data" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportData": { + "methods": [ + "import_data" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + 
"methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed new file mode 100644 index 000000000000..743160d56b7b --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-parallelstore package uses inline types. diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py new file mode 100644 index 000000000000..ef9094cd9b4e --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ParallelstoreAsyncClient +from .client import ParallelstoreClient + +__all__ = ( + "ParallelstoreClient", + "ParallelstoreAsyncClient", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py new file mode 100644 index 000000000000..2d553f8706be --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/async_client.py @@ -0,0 +1,1498 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.parallelstore_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.parallelstore_v1.services.parallelstore import pagers +from google.cloud.parallelstore_v1.types import parallelstore + +from .client import ParallelstoreClient +from .transports.base import DEFAULT_CLIENT_INFO, ParallelstoreTransport +from .transports.grpc_asyncio import ParallelstoreGrpcAsyncIOTransport + + +class ParallelstoreAsyncClient: + """Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. 
+ - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + """ + + _client: ParallelstoreClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ParallelstoreClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ParallelstoreClient._DEFAULT_UNIVERSE + + address_path = staticmethod(ParallelstoreClient.address_path) + parse_address_path = staticmethod(ParallelstoreClient.parse_address_path) + instance_path = staticmethod(ParallelstoreClient.instance_path) + parse_instance_path = staticmethod(ParallelstoreClient.parse_instance_path) + network_path = staticmethod(ParallelstoreClient.network_path) + parse_network_path = staticmethod(ParallelstoreClient.parse_network_path) + service_account_path = staticmethod(ParallelstoreClient.service_account_path) + parse_service_account_path = staticmethod( + ParallelstoreClient.parse_service_account_path + ) + common_billing_account_path = staticmethod( + ParallelstoreClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ParallelstoreClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ParallelstoreClient.common_folder_path) + parse_common_folder_path = staticmethod( + ParallelstoreClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ParallelstoreClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ParallelstoreClient.parse_common_organization_path + ) + common_project_path = staticmethod(ParallelstoreClient.common_project_path) + parse_common_project_path = staticmethod( + ParallelstoreClient.parse_common_project_path + ) + common_location_path = staticmethod(ParallelstoreClient.common_location_path) + parse_common_location_path = staticmethod( + ParallelstoreClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ParallelstoreAsyncClient: The constructed client. + """ + return ParallelstoreClient.from_service_account_info.__func__(ParallelstoreAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ParallelstoreAsyncClient: The constructed client. 
+ """ + return ParallelstoreClient.from_service_account_file.__func__(ParallelstoreAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ParallelstoreClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ParallelstoreTransport: + """Returns the transport used by the client instance. + + Returns: + ParallelstoreTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ParallelstoreClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ParallelstoreTransport, Callable[..., ParallelstoreTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the parallelstore async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ParallelstoreTransport,Callable[..., ParallelstoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ParallelstoreTransport constructor. 
+ If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ParallelstoreClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_instances( + self, + request: Optional[Union[parallelstore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists all instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.ListInstancesRequest, dict]]): + The request object. List instances request. + parent (:class:`str`): + Required. The project and location for which to retrieve + instance information, in the format + ``projects/{project_id}/locations/{location}``. 
+ + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesAsyncPager: + Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ListInstancesRequest): + request = parallelstore.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_instances + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance( + self, + request: Optional[Union[parallelstore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.Instance: + r"""Gets details of a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.GetInstanceRequest, dict]]): + The request object. Get an instance's details. + name (:class:`str`): + Required. The instance resource name, in the format + ``projects/{project_id}/locations/{location}/instances/{instance_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.types.Instance: + A Parallelstore instance. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.GetInstanceRequest): + request = parallelstore.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_instance( + self, + request: Optional[Union[parallelstore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[parallelstore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Parallelstore instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.CreateInstanceRequest, dict]]): + The request object. Create a new Parallelstore instance. + parent (:class:`str`): + Required. The instance's project and location, in the + format ``projects/{project}/locations/{location}``. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.parallelstore_v1.types.Instance`): + Required. The instance to create. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. The name of the Parallelstore instance. + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.CreateInstanceRequest): + request = parallelstore.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_instance( + self, + request: Optional[Union[parallelstore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[parallelstore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.UpdateInstanceRequest, dict]]): + The request object. Update an instance. + instance (:class:`google.cloud.parallelstore_v1.types.Instance`): + Required. The instance to update. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. Field mask is used + to specify the fields to be overwritten in the Instance + resource by the update. At least one path must be + supplied in this field. The fields specified in the + update_mask are relative to the resource, not the full + request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.UpdateInstanceRequest): + request = parallelstore.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance( + self, + request: Optional[Union[parallelstore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_delete_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.DeleteInstanceRequest, dict]]): + The request object. Delete an instance. + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.DeleteInstanceRequest): + request = parallelstore.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def import_data( + self, + request: Optional[Union[parallelstore.ImportDataRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Copies data from Cloud Storage to Parallelstore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_import_data(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + source_gcs_bucket = parallelstore_v1.SourceGcsBucket() + source_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ImportDataRequest( + source_gcs_bucket=source_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.ImportDataRequest, dict]]): + The request object. Import data from Cloud Storage into a + Parallelstore instance. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.ImportDataResponse` + The response to a request to import data to + Parallelstore. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ImportDataRequest): + request = parallelstore.ImportDataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_data + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + parallelstore.ImportDataResponse, + metadata_type=parallelstore.ImportDataMetadata, + ) + + # Done; return the response. + return response + + async def export_data( + self, + request: Optional[Union[parallelstore.ExportDataRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Copies data from Parallelstore to Cloud Storage. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + async def sample_export_data(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket() + destination_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ExportDataRequest( + destination_gcs_bucket=destination_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.parallelstore_v1.types.ExportDataRequest, dict]]): + The request object. Export data from Parallelstore to + Cloud Storage. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.ExportDataResponse` + The response to a request to export data from + Parallelstore. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ExportDataRequest): + request = parallelstore.ExportDataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_data + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + parallelstore.ExportDataResponse, + metadata_type=parallelstore.ExportDataMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ParallelstoreAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ParallelstoreAsyncClient",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py new file mode 100644 index 000000000000..20afd53a8928 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/client.py @@ -0,0 +1,1960 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.parallelstore_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.parallelstore_v1.services.parallelstore import pagers +from google.cloud.parallelstore_v1.types import parallelstore + +from .transports.base import DEFAULT_CLIENT_INFO, ParallelstoreTransport +from .transports.grpc import ParallelstoreGrpcTransport +from .transports.grpc_asyncio import ParallelstoreGrpcAsyncIOTransport +from .transports.rest import ParallelstoreRestTransport + + +class ParallelstoreClientMeta(type): + """Metaclass for the Parallelstore client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ParallelstoreTransport]] + _transport_registry["grpc"] = ParallelstoreGrpcTransport + _transport_registry["grpc_asyncio"] = ParallelstoreGrpcAsyncIOTransport + _transport_registry["rest"] = ParallelstoreRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ParallelstoreTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ParallelstoreClient(metaclass=ParallelstoreClientMeta): + """Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "parallelstore.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "parallelstore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ParallelstoreClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ParallelstoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ParallelstoreTransport: + """Returns the transport used by the client instance. + + Returns: + ParallelstoreTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def address_path( + project: str, + region: str, + address: str, + ) -> str: + """Returns a fully-qualified address string.""" + return "projects/{project}/regions/{region}/addresses/{address}".format( + project=project, + region=region, + address=address, + ) + + @staticmethod + def parse_address_path(path: str) -> Dict[str, str]: + """Parses a address path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/addresses/(?P
.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def service_account_path( + project: str, + service_account: str, + ) -> str: + """Returns a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str, str]: + """Parses a service_account path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return 
"projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
+ + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ParallelstoreClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ParallelstoreClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ParallelstoreTransport, Callable[..., ParallelstoreTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the parallelstore client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ParallelstoreTransport,Callable[..., ParallelstoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ParallelstoreTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ParallelstoreClient._read_environment_variables() + self._client_cert_source = ParallelstoreClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ParallelstoreClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ParallelstoreTransport) + if transport_provided: + # transport is a ParallelstoreTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ParallelstoreTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ParallelstoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ParallelstoreTransport], Callable[..., ParallelstoreTransport] + ] = ( + ParallelstoreClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ParallelstoreTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_instances( + self, + request: Optional[Union[parallelstore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists all instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.ListInstancesRequest, dict]): + The request object. List instances request. + parent (str): + Required. The project and location for which to retrieve + instance information, in the format + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesPager: + Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ListInstancesRequest): + request = parallelstore.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance( + self, + request: Optional[Union[parallelstore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.Instance: + r"""Gets details of a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.GetInstanceRequest, dict]): + The request object. Get an instance's details. + name (str): + Required. The instance resource name, in the format + ``projects/{project_id}/locations/{location}/instances/{instance_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.parallelstore_v1.types.Instance: + A Parallelstore instance. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.GetInstanceRequest): + request = parallelstore.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instance( + self, + request: Optional[Union[parallelstore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[parallelstore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Parallelstore instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.CreateInstanceRequest, dict]): + The request object. Create a new Parallelstore instance. + parent (str): + Required. The instance's project and location, in the + format ``projects/{project}/locations/{location}``. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.parallelstore_v1.types.Instance): + Required. The instance to create. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The name of the Parallelstore instance. + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.CreateInstanceRequest): + request = parallelstore.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance( + self, + request: Optional[Union[parallelstore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[parallelstore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.UpdateInstanceRequest, dict]): + The request object. Update an instance. + instance (google.cloud.parallelstore_v1.types.Instance): + Required. The instance to update. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. Field mask is used + to specify the fields to be overwritten in the Instance + resource by the update. At least one path must be + supplied in this field. The fields specified in the + update_mask are relative to the resource, not the full + request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.Instance` A + Parallelstore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.UpdateInstanceRequest): + request = parallelstore.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. 
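+        # Note: for UpdateInstance the routing header below is keyed on
+        # ``instance.name`` because the resource name lives on the nested
+        # Instance message rather than on the request itself.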
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.Instance, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance( + self, + request: Optional[Union[parallelstore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_delete_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.DeleteInstanceRequest, dict]): + The request object. Delete an instance. + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, parallelstore.DeleteInstanceRequest): + request = parallelstore.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=parallelstore.OperationMetadata, + ) + + # Done; return the response. + return response + + def import_data( + self, + request: Optional[Union[parallelstore.ImportDataRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Copies data from Cloud Storage to Parallelstore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_import_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + source_gcs_bucket = parallelstore_v1.SourceGcsBucket() + source_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ImportDataRequest( + source_gcs_bucket=source_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.ImportDataRequest, dict]): + The request object. Import data from Cloud Storage into a + Parallelstore instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.ImportDataResponse` + The response to a request to import data to + Parallelstore. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ImportDataRequest): + request = parallelstore.ImportDataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.import_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.ImportDataResponse, + metadata_type=parallelstore.ImportDataMetadata, + ) + + # Done; return the response. + return response + + def export_data( + self, + request: Optional[Union[parallelstore.ExportDataRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Copies data from Parallelstore to Cloud Storage. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import parallelstore_v1 + + def sample_export_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket() + destination_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ExportDataRequest( + destination_gcs_bucket=destination_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.parallelstore_v1.types.ExportDataRequest, dict]): + The request object. Export data from Parallelstore to + Cloud Storage. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.parallelstore_v1.types.ExportDataResponse` + The response to a request to export data from + Parallelstore. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, parallelstore.ExportDataRequest): + request = parallelstore.ExportDataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + parallelstore.ExportDataResponse, + metadata_type=parallelstore.ExportDataMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ParallelstoreClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
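# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# The operations/locations mixins above accept either an operations_pb2 /
# locations_pb2 proto or a plain dict expanded into one (the isinstance(dict)
# branch). Resource names below are placeholders.

from google.cloud import parallelstore_v1

client = parallelstore_v1.ParallelstoreClient()

# List long-running operations for a location and cancel any unfinished one.
ops = client.list_operations({"name": "projects/my-project/locations/us-central1-c"})
for op in ops.operations:
    print(op.name, op.done)
    if not op.done:
        client.cancel_operation({"name": op.name})  # best-effort cancellation

# The locations mixin follows the same request pattern.
locations = client.list_locations({"name": "projects/my-project"})
# ---------------------------------------------------------------------------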
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ParallelstoreClient",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py new file mode 100644 index 000000000000..d70d39fd27ac --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/pagers.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., parallelstore.ListInstancesResponse], + request: parallelstore.ListInstancesRequest, + response: parallelstore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.parallelstore_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.parallelstore_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = parallelstore.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[parallelstore.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[parallelstore.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.parallelstore_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[parallelstore.ListInstancesResponse]], + request: parallelstore.ListInstancesRequest, + response: parallelstore.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.parallelstore_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.parallelstore_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = parallelstore.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[parallelstore.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[parallelstore.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py new file mode 100644 index 000000000000..b8fe31557374 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ParallelstoreTransport +from .grpc import ParallelstoreGrpcTransport +from .grpc_asyncio import ParallelstoreGrpcAsyncIOTransport +from .rest import ParallelstoreRestInterceptor, ParallelstoreRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ParallelstoreTransport]] +_transport_registry["grpc"] = ParallelstoreGrpcTransport +_transport_registry["grpc_asyncio"] = ParallelstoreGrpcAsyncIOTransport +_transport_registry["rest"] = ParallelstoreRestTransport + +__all__ = ( + "ParallelstoreTransport", + "ParallelstoreGrpcTransport", + "ParallelstoreGrpcAsyncIOTransport", + "ParallelstoreRestTransport", + "ParallelstoreRestInterceptor", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py new file mode 100644 index 000000000000..e62740dc2cd0 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/base.py @@ -0,0 +1,309 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.parallelstore_v1 import gapic_version as package_version +from google.cloud.parallelstore_v1.types import parallelstore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ParallelstoreTransport(abc.ABC): + """Abstract transport class for Parallelstore.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "parallelstore.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
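# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# As the branch just below shows, `credentials` and `credentials_file` are
# mutually exclusive; omitting both falls back to Application Default
# Credentials via google.auth.default(). The key path is a placeholder.

from google.oauth2 import service_account

from google.cloud import parallelstore_v1

creds = service_account.Credentials.from_service_account_file(
    "/path/to/service-account.json",
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)
client = parallelstore_v1.ParallelstoreClient(credentials=creds)
# ---------------------------------------------------------------------------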
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_timeout=None, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=None, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_timeout=None, + client_info=client_info, + ), + self.import_data: gapic_v1.method.wrap_method( + self.import_data, + default_timeout=None, + client_info=client_info, + ), + self.export_data: gapic_v1.method.wrap_method( + self.export_data, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], + Union[ + parallelstore.ListInstancesResponse, + Awaitable[parallelstore.ListInstancesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> Callable[ + [parallelstore.GetInstanceRequest], + Union[parallelstore.Instance, Awaitable[parallelstore.Instance]], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> Callable[ + [parallelstore.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_instance( + self, + ) -> Callable[ + [parallelstore.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> Callable[ + [parallelstore.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def import_data( + self, + ) -> Callable[ + [parallelstore.ImportDataRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_data( + self, + ) -> Callable[ + [parallelstore.ExportDataRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ParallelstoreTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py new file mode 100644 index 000000000000..3f3073c101af --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc.py @@ -0,0 +1,573 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + +from .base import DEFAULT_CLIENT_INFO, ParallelstoreTransport + + +class ParallelstoreGrpcTransport(ParallelstoreTransport): + """gRPC backend transport for Parallelstore. + + Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
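# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# A caller can hand the transport a pre-built channel (credentials passed to
# the transport are then ignored, per the isinstance(channel, grpc.Channel)
# branch above) and give that transport to the client. The endpoint below is
# the default host with its default HTTPS port.

from google.cloud import parallelstore_v1
from google.cloud.parallelstore_v1.services.parallelstore.transports import (
    ParallelstoreGrpcTransport,
)

channel = ParallelstoreGrpcTransport.create_channel(
    "parallelstore.googleapis.com:443",
)
transport = ParallelstoreGrpcTransport(channel=channel)
client = parallelstore_v1.ParallelstoreClient(transport=transport)
# ---------------------------------------------------------------------------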
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], parallelstore.ListInstancesResponse + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ListInstances", + request_serializer=parallelstore.ListInstancesRequest.serialize, + response_deserializer=parallelstore.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[parallelstore.GetInstanceRequest], parallelstore.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/GetInstance", + request_serializer=parallelstore.GetInstanceRequest.serialize, + response_deserializer=parallelstore.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[parallelstore.CreateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a Parallelstore instance in a given project + and location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/CreateInstance", + request_serializer=parallelstore.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[[parallelstore.UpdateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/UpdateInstance", + request_serializer=parallelstore.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[[parallelstore.DeleteInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/DeleteInstance", + request_serializer=parallelstore.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def import_data( + self, + ) -> Callable[[parallelstore.ImportDataRequest], operations_pb2.Operation]: + r"""Return a callable for the import data method over gRPC. + + Copies data from Cloud Storage to Parallelstore. + + Returns: + Callable[[~.ImportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ImportData", + request_serializer=parallelstore.ImportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[[parallelstore.ExportDataRequest], operations_pb2.Operation]: + r"""Return a callable for the export data method over gRPC. + + Copies data from Parallelstore to Cloud Storage. + + Returns: + Callable[[~.ExportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ExportData", + request_serializer=parallelstore.ExportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_data"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ParallelstoreGrpcTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py new file mode 100644 index 000000000000..721fc6fcc843 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/grpc_asyncio.py @@ -0,0 +1,628 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + +from .base import DEFAULT_CLIENT_INFO, ParallelstoreTransport +from .grpc import ParallelstoreGrpcTransport + + +class ParallelstoreGrpcAsyncIOTransport(ParallelstoreTransport): + """gRPC AsyncIO backend transport for Parallelstore. + + Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. 
+ + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], + Awaitable[parallelstore.ListInstancesResponse], + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
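# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# This asyncio transport backs ParallelstoreAsyncClient; list results come
# back as the ListInstancesAsyncPager defined in pagers.py. The project and
# zone are placeholders.

import asyncio

from google.cloud import parallelstore_v1


async def main() -> None:
    client = parallelstore_v1.ParallelstoreAsyncClient()
    pager = await client.list_instances(
        parent="projects/my-project/locations/us-central1-c"
    )
    async for instance in pager:  # __aiter__ fetches follow-up pages lazily
        print(instance.name)


asyncio.run(main())
# ---------------------------------------------------------------------------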
+ if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ListInstances", + request_serializer=parallelstore.ListInstancesRequest.serialize, + response_deserializer=parallelstore.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[ + [parallelstore.GetInstanceRequest], Awaitable[parallelstore.Instance] + ]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/GetInstance", + request_serializer=parallelstore.GetInstanceRequest.serialize, + response_deserializer=parallelstore.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[ + [parallelstore.CreateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create instance method over gRPC. + + Creates a Parallelstore instance in a given project + and location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/CreateInstance", + request_serializer=parallelstore.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[ + [parallelstore.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/UpdateInstance", + request_serializer=parallelstore.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[ + [parallelstore.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single instance. 
+ + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/DeleteInstance", + request_serializer=parallelstore.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def import_data( + self, + ) -> Callable[ + [parallelstore.ImportDataRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the import data method over gRPC. + + Copies data from Cloud Storage to Parallelstore. + + Returns: + Callable[[~.ImportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ImportData", + request_serializer=parallelstore.ImportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[ + [parallelstore.ExportDataRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the export data method over gRPC. + + Copies data from Parallelstore to Cloud Storage. + + Returns: + Callable[[~.ExportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
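+        # A minimal polling sketch (comments only, not executed), assuming a
+        # placeholder ``instance_name`` and an already-constructed transport;
+        # the callable returns a raw ``operations_pb2.Operation`` that can be
+        # polled through the ``operations_client`` property defined above:
+        #
+        #     operation = await transport.export_data(
+        #         parallelstore.ExportDataRequest(name=instance_name)
+        #     )
+        #     latest = await transport.operations_client.get_operation(operation.name)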
+ if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.parallelstore.v1.Parallelstore/ExportData", + request_serializer=parallelstore.ExportDataRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_data"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instances: gapic_v1.method_async.wrap_method( + self.list_instances, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: gapic_v1.method_async.wrap_method( + self.get_instance, + default_timeout=None, + client_info=client_info, + ), + self.create_instance: gapic_v1.method_async.wrap_method( + self.create_instance, + default_timeout=None, + client_info=client_info, + ), + self.update_instance: gapic_v1.method_async.wrap_method( + self.update_instance, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method_async.wrap_method( + self.delete_instance, + default_timeout=None, + client_info=client_info, + ), + self.import_data: gapic_v1.method_async.wrap_method( + self.import_data, + default_timeout=None, + client_info=client_info, + ), + self.export_data: gapic_v1.method_async.wrap_method( + self.export_data, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ParallelstoreGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py new file mode 100644 index 000000000000..928252b84ce3 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/services/parallelstore/transports/rest.py @@ -0,0 +1,1696 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.parallelstore_v1.types import parallelstore + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ParallelstoreTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ParallelstoreRestInterceptor: + """Interceptor for Parallelstore. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ParallelstoreRestTransport. + + .. 
code-block:: python + class MyCustomParallelstoreInterceptor(ParallelstoreRestInterceptor): + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_export_data(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_data(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_data(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_data(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ParallelstoreRestTransport(interceptor=MyCustomParallelstoreInterceptor()) + client = ParallelstoreClient(transport=transport) + + + """ + + def pre_create_instance( + self, + request: parallelstore.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_delete_instance( + self, + request: parallelstore.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. 
+ """ + return response + + def pre_export_data( + self, + request: parallelstore.ExportDataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.ExportDataRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_data + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_export_data( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_data + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_get_instance( + self, + request: parallelstore.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_get_instance( + self, response: parallelstore.Instance + ) -> parallelstore.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_import_data( + self, + request: parallelstore.ImportDataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.ImportDataRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_data + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_import_data( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_data + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_list_instances( + self, + request: parallelstore.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_list_instances( + self, response: parallelstore.ListInstancesResponse + ) -> parallelstore.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_update_instance( + self, + request: parallelstore.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[parallelstore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Parallelstore server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Parallelstore server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ParallelstoreRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ParallelstoreRestInterceptor + + +class ParallelstoreRestTransport(ParallelstoreTransport): + """REST backend transport for Parallelstore. + + Service describing handlers for resources Configures and manages + parallelstore resources. + + Parallelstore service. + + The ``parallelstore.googleapis.com`` service implements the + parallelstore API and defines the following resource model for + managing instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of instances named + ``/instances/*``. + - Parallelstore instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a Google Cloud ``zone``; for example: + + - ``projects/12345/locations/us-central1-c/instances/my-parallelstore-share`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "parallelstore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ParallelstoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'parallelstore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. 
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ParallelstoreRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
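+        # A minimal polling sketch (comments only, not executed); the operation
+        # name below is a placeholder such as would be returned by a prior
+        # mutating call like ``create_instance``:
+        #
+        #     op = transport.operations_client.get_operation(
+        #         "projects/my-project/locations/us-central1-a/operations/op-123"
+        #     )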
+ return self._operations_client + + class _CreateInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("CreateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.parallelstore.CreateInstanceRequest): + The request object. Create a new Parallelstore instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = parallelstore.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("DeleteInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.parallelstore.DeleteInstanceRequest): + The request object. Delete an instance. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = parallelstore.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _ExportData(ParallelstoreRestStub): + def __hash__(self): + return hash("ExportData") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.ExportDataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export data method over HTTP. + + Args: + request (~.parallelstore.ExportDataRequest): + The request object. Export data from Parallelstore to + Cloud Storage. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:exportData", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_data(request, metadata) + pb_request = parallelstore.ExportDataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_data(resp) + return resp + + class _GetInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("GetInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.parallelstore.GetInstanceRequest): + The request object. Get an instance's details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.parallelstore.Instance: + A Parallelstore instance. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = parallelstore.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = parallelstore.Instance() + pb_resp = parallelstore.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ImportData(ParallelstoreRestStub): + def __hash__(self): + return hash("ImportData") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.ImportDataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import data method over HTTP. + + Args: + request (~.parallelstore.ImportDataRequest): + The request object. Import data from Cloud Storage into a + Parallelstore instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:importData", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_data(request, metadata) + pb_request = parallelstore.ImportDataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_data(resp) + return resp + + class _ListInstances(ParallelstoreRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> parallelstore.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.parallelstore.ListInstancesRequest): + The request object. List instances request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.parallelstore.ListInstancesResponse: + Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = parallelstore.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = parallelstore.ListInstancesResponse() + pb_resp = parallelstore.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _UpdateInstance(ParallelstoreRestStub): + def __hash__(self): + return hash("UpdateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: parallelstore.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.parallelstore.UpdateInstanceRequest): + The request object. Update an instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = parallelstore.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + @property + def create_instance( + self, + ) -> Callable[[parallelstore.CreateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance( + self, + ) -> Callable[[parallelstore.DeleteInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_data( + self, + ) -> Callable[[parallelstore.ExportDataRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportData(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance( + self, + ) -> Callable[[parallelstore.GetInstanceRequest], parallelstore.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_data( + self, + ) -> Callable[[parallelstore.ImportDataRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ImportData(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances( + self, + ) -> Callable[ + [parallelstore.ListInstancesRequest], parallelstore.ListInstancesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance( + self, + ) -> Callable[[parallelstore.UpdateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ParallelstoreRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ParallelstoreRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
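+            # Note: ``from_http_response`` converts the error payload into the
+            # matching typed exception (for example ``core_exceptions.NotFound``
+            # for a 404), so callers can catch specific exception classes rather
+            # than inspecting raw status codes.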
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ParallelstoreRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ParallelstoreRestTransport",) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py new file mode 100644 index 000000000000..faadce2e0eff --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .parallelstore import ( + CreateInstanceRequest, + DeleteInstanceRequest, + DestinationGcsBucket, + DestinationParallelstore, + DirectoryStripeLevel, + ExportDataMetadata, + ExportDataRequest, + ExportDataResponse, + FileStripeLevel, + GetInstanceRequest, + ImportDataMetadata, + ImportDataRequest, + ImportDataResponse, + Instance, + ListInstancesRequest, + ListInstancesResponse, + OperationMetadata, + SourceGcsBucket, + SourceParallelstore, + TransferCounters, + TransferOperationMetadata, + TransferType, + UpdateInstanceRequest, +) + +__all__ = ( + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DestinationGcsBucket", + "DestinationParallelstore", + "ExportDataMetadata", + "ExportDataRequest", + "ExportDataResponse", + "GetInstanceRequest", + "ImportDataMetadata", + "ImportDataRequest", + "ImportDataResponse", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "OperationMetadata", + "SourceGcsBucket", + "SourceParallelstore", + "TransferCounters", + "TransferOperationMetadata", + "UpdateInstanceRequest", + "DirectoryStripeLevel", + "FileStripeLevel", + "TransferType", +) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py new file mode 100644 index 000000000000..29e63b7f19f5 --- /dev/null +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py @@ -0,0 +1,1063 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
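+#
+# A minimal usage sketch (not part of the generated module): the request and
+# resource classes re-exported by the ``types`` package above are proto-plus
+# messages, so they can be built directly with keyword arguments. This assumes
+# the google-cloud-parallelstore package produced by this patch is installed;
+# the project, location, and instance values are placeholders.
+#
+#     from google.cloud.parallelstore_v1.types import GetInstanceRequest
+#
+#     request = GetInstanceRequest(
+#         name="projects/my-project/locations/us-central1-a/instances/my-instance",
+#     )
+#     assert request.name.startswith("projects/")
+#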
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.parallelstore.v1", + manifest={ + "TransferType", + "FileStripeLevel", + "DirectoryStripeLevel", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "OperationMetadata", + "SourceGcsBucket", + "DestinationGcsBucket", + "SourceParallelstore", + "DestinationParallelstore", + "ImportDataRequest", + "ExportDataRequest", + "ImportDataResponse", + "ImportDataMetadata", + "ExportDataResponse", + "ExportDataMetadata", + "TransferOperationMetadata", + "TransferCounters", + }, +) + + +class TransferType(proto.Enum): + r"""Type of transfer that occurred. + + Values: + TRANSFER_TYPE_UNSPECIFIED (0): + Zero is an illegal value. + IMPORT (1): + Imports to Parallelstore. + EXPORT (2): + Exports from Parallelstore. + """ + TRANSFER_TYPE_UNSPECIFIED = 0 + IMPORT = 1 + EXPORT = 2 + + +class FileStripeLevel(proto.Enum): + r"""Represents the striping options for files. + + Values: + FILE_STRIPE_LEVEL_UNSPECIFIED (0): + If not set, FileStripeLevel will default to + FILE_STRIPE_LEVEL_BALANCED + FILE_STRIPE_LEVEL_MIN (1): + Minimum file striping + FILE_STRIPE_LEVEL_BALANCED (2): + Medium file striping + FILE_STRIPE_LEVEL_MAX (3): + Maximum file striping + """ + FILE_STRIPE_LEVEL_UNSPECIFIED = 0 + FILE_STRIPE_LEVEL_MIN = 1 + FILE_STRIPE_LEVEL_BALANCED = 2 + FILE_STRIPE_LEVEL_MAX = 3 + + +class DirectoryStripeLevel(proto.Enum): + r"""Represents the striping options for directories. + + Values: + DIRECTORY_STRIPE_LEVEL_UNSPECIFIED (0): + If not set, DirectoryStripeLevel will default to + DIRECTORY_STRIPE_LEVEL_MAX + DIRECTORY_STRIPE_LEVEL_MIN (1): + Minimum directory striping + DIRECTORY_STRIPE_LEVEL_BALANCED (2): + Medium directory striping + DIRECTORY_STRIPE_LEVEL_MAX (3): + Maximum directory striping + """ + DIRECTORY_STRIPE_LEVEL_UNSPECIFIED = 0 + DIRECTORY_STRIPE_LEVEL_MIN = 1 + DIRECTORY_STRIPE_LEVEL_BALANCED = 2 + DIRECTORY_STRIPE_LEVEL_MAX = 3 + + +class Instance(proto.Message): + r"""A Parallelstore instance. + + Attributes: + name (str): + Identifier. The resource name of the instance, in the format + ``projects/{project}/locations/{location}/instances/{instance_id}``. + description (str): + Optional. The description of the instance. + 2048 characters or less. + state (google.cloud.parallelstore_v1.types.Instance.State): + Output only. The instance state. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + labels (MutableMapping[str, str]): + Optional. Cloud Labels are a flexible and + lightweight mechanism for organizing cloud + resources into groups that reflect a customer's + organizational needs and deployment strategies. + See + https://cloud.google.com/resource-manager/docs/labels-overview + for details. + capacity_gib (int): + Required. Immutable. The instance's storage + capacity in Gibibytes (GiB). Allowed values are + between 12000 and 100000, in multiples of 4000; + e.g., 12000, 16000, 20000, ... + daos_version (str): + Output only. The version of DAOS software + running in the instance. 
+ access_points (MutableSequence[str]): + Output only. A list of IPv4 addresses used + for client side configuration. + network (str): + Optional. Immutable. The name of the Compute Engine `VPC + network `__ to which + the instance is connected. + reserved_ip_range (str): + Optional. Immutable. The ID of the IP address range being + used by the instance's VPC network. See `Configure a VPC + network `__. + If no ID is provided, all ranges are considered. + effective_reserved_ip_range (str): + Output only. Immutable. The ID of the IP + address range being used by the instance's VPC + network. This field is populated by the service + and contains the value currently used by the + service. + file_stripe_level (google.cloud.parallelstore_v1.types.FileStripeLevel): + Optional. Stripe level for files. Allowed values are: + + - ``FILE_STRIPE_LEVEL_MIN``: offers the best performance + for small size files. + - ``FILE_STRIPE_LEVEL_BALANCED``: balances performance for + workloads involving a mix of small and large files. + - ``FILE_STRIPE_LEVEL_MAX``: higher throughput performance + for larger files. + directory_stripe_level (google.cloud.parallelstore_v1.types.DirectoryStripeLevel): + Optional. Stripe level for directories. Allowed values are: + + - ``DIRECTORY_STRIPE_LEVEL_MIN``: recommended when + directories contain a small number of files. + - ``DIRECTORY_STRIPE_LEVEL_BALANCED``: balances performance + for workloads involving a mix of small and large + directories. + - ``DIRECTORY_STRIPE_LEVEL_MAX``: recommended for + directories with a large number of files. + """ + + class State(proto.Enum): + r"""The possible states of a Parallelstore instance. + + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + The instance is being created. + ACTIVE (2): + The instance is available for use. + DELETING (3): + The instance is being deleted. + FAILED (4): + The instance is not usable. + UPGRADING (5): + The instance is being upgraded. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + FAILED = 4 + UPGRADING = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + capacity_gib: int = proto.Field( + proto.INT64, + number=8, + ) + daos_version: str = proto.Field( + proto.STRING, + number=9, + ) + access_points: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + network: str = proto.Field( + proto.STRING, + number=11, + ) + reserved_ip_range: str = proto.Field( + proto.STRING, + number=12, + ) + effective_reserved_ip_range: str = proto.Field( + proto.STRING, + number=14, + ) + file_stripe_level: "FileStripeLevel" = proto.Field( + proto.ENUM, + number=15, + enum="FileStripeLevel", + ) + directory_stripe_level: "DirectoryStripeLevel" = proto.Field( + proto.ENUM, + number=16, + enum="DirectoryStripeLevel", + ) + + +class ListInstancesRequest(proto.Message): + r"""List instances request. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + instance information, in the format + ``projects/{project_id}/locations/{location}``. 
+ + To retrieve instance information for all locations, use "-" + as the value of ``{location}``. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, the server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInstancesResponse(proto.Message): + r"""Response from + [ListInstances][google.cloud.parallelstore.v1.Parallelstore.ListInstances]. + + Attributes: + instances (MutableSequence[google.cloud.parallelstore_v1.types.Instance]): + The list of Parallelstore instances. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances: MutableSequence["Instance"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Instance", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Get an instance's details. + + Attributes: + name (str): + Required. The instance resource name, in the format + ``projects/{project_id}/locations/{location}/instances/{instance_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Create a new Parallelstore instance. + + Attributes: + parent (str): + Required. The instance's project and location, in the format + ``projects/{project}/locations/{location}``. Locations map + to Google Cloud zones; for example, ``us-west1-b``. + instance_id (str): + Required. The name of the Parallelstore instance. + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + instance (google.cloud.parallelstore_v1.types.Instance): + Required. The instance to create. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: "Instance" = proto.Field( + proto.MESSAGE, + number=3, + message="Instance", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""Update an instance. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. Field mask is used to + specify the fields to be overwritten in the Instance + resource by the update. At least one path must be supplied + in this field. The fields specified in the update_mask are + relative to the resource, not the full request. + instance (google.cloud.parallelstore_v1.types.Instance): + Required. The instance to update. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance: "Instance" = proto.Field( + proto.MESSAGE, + number=2, + message="Instance", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Delete an instance. + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Long-running operation metadata. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. 
Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SourceGcsBucket(proto.Message): + r"""Cloud Storage as the source of a data transfer. + + Attributes: + uri (str): + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DestinationGcsBucket(proto.Message): + r"""Cloud Storage as the destination of a data transfer. + + Attributes: + uri (str): + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SourceParallelstore(proto.Message): + r"""Parallelstore as the source of a data transfer. + + Attributes: + path (str): + Optional. Root directory path to the Paralellstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DestinationParallelstore(proto.Message): + r"""Parallelstore as the destination of a data transfer. + + Attributes: + path (str): + Optional. Root directory path to the Paralellstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ImportDataRequest(proto.Message): + r"""Import data from Cloud Storage into a Parallelstore instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_gcs_bucket (google.cloud.parallelstore_v1.types.SourceGcsBucket): + The Cloud Storage source bucket and, + optionally, path inside the bucket. + + This field is a member of `oneof`_ ``source``. + destination_parallelstore (google.cloud.parallelstore_v1.types.DestinationParallelstore): + Parallelstore destination. + + This field is a member of `oneof`_ ``destination``. + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + service_account (str): + Optional. User-specified service account credentials to be + used when performing the transfer. + + Use one of the following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + + If unspecified, the Parallelstore service agent is used: + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` + """ + + source_gcs_bucket: "SourceGcsBucket" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="SourceGcsBucket", + ) + destination_parallelstore: "DestinationParallelstore" = proto.Field( + proto.MESSAGE, + number=3, + oneof="destination", + message="DestinationParallelstore", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + service_account: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ExportDataRequest(proto.Message): + r"""Export data from Parallelstore to Cloud Storage. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_parallelstore (google.cloud.parallelstore_v1.types.SourceParallelstore): + Parallelstore source. + + This field is a member of `oneof`_ ``source``. + destination_gcs_bucket (google.cloud.parallelstore_v1.types.DestinationGcsBucket): + Cloud Storage destination. + + This field is a member of `oneof`_ ``destination``. + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + service_account (str): + Optional. User-specified Service Account (SA) credentials to + be used when performing the transfer. 
Use one of the + following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + + If unspecified, the Parallelstore service agent is used: + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` + """ + + source_parallelstore: "SourceParallelstore" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="SourceParallelstore", + ) + destination_gcs_bucket: "DestinationGcsBucket" = proto.Field( + proto.MESSAGE, + number=3, + oneof="destination", + message="DestinationGcsBucket", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + service_account: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ImportDataResponse(proto.Message): + r"""The response to a request to import data to Parallelstore.""" + + +class ImportDataMetadata(proto.Message): + r"""Metadata related to the data import operation. + + Attributes: + operation_metadata (google.cloud.parallelstore_v1.types.TransferOperationMetadata): + Data transfer operation metadata. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + operation_metadata: "TransferOperationMetadata" = proto.Field( + proto.MESSAGE, + number=1, + message="TransferOperationMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=4, + ) + verb: str = proto.Field( + proto.STRING, + number=5, + ) + status_message: str = proto.Field( + proto.STRING, + number=6, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=7, + ) + api_version: str = proto.Field( + proto.STRING, + number=8, + ) + + +class ExportDataResponse(proto.Message): + r"""The response to a request to export data from Parallelstore.""" + + +class ExportDataMetadata(proto.Message): + r"""Metadata related to the data export operation. + + Attributes: + operation_metadata (google.cloud.parallelstore_v1.types.TransferOperationMetadata): + Data transfer operation metadata. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. 
+ status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + operation_metadata: "TransferOperationMetadata" = proto.Field( + proto.MESSAGE, + number=1, + message="TransferOperationMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=4, + ) + verb: str = proto.Field( + proto.STRING, + number=5, + ) + status_message: str = proto.Field( + proto.STRING, + number=6, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=7, + ) + api_version: str = proto.Field( + proto.STRING, + number=8, + ) + + +class TransferOperationMetadata(proto.Message): + r"""Long-running operation metadata related to a data transfer. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_parallelstore (google.cloud.parallelstore_v1.types.SourceParallelstore): + Output only. Parallelstore source. + + This field is a member of `oneof`_ ``source``. + source_gcs_bucket (google.cloud.parallelstore_v1.types.SourceGcsBucket): + Output only. Cloud Storage source. + + This field is a member of `oneof`_ ``source``. + destination_gcs_bucket (google.cloud.parallelstore_v1.types.DestinationGcsBucket): + Output only. Cloud Storage destination. + + This field is a member of `oneof`_ ``destination``. + destination_parallelstore (google.cloud.parallelstore_v1.types.DestinationParallelstore): + Output only. Parallelstore destination. + + This field is a member of `oneof`_ ``destination``. + counters (google.cloud.parallelstore_v1.types.TransferCounters): + Output only. The progress of the transfer + operation. + transfer_type (google.cloud.parallelstore_v1.types.TransferType): + Output only. The type of transfer occurring. + """ + + source_parallelstore: "SourceParallelstore" = proto.Field( + proto.MESSAGE, + number=7, + oneof="source", + message="SourceParallelstore", + ) + source_gcs_bucket: "SourceGcsBucket" = proto.Field( + proto.MESSAGE, + number=8, + oneof="source", + message="SourceGcsBucket", + ) + destination_gcs_bucket: "DestinationGcsBucket" = proto.Field( + proto.MESSAGE, + number=9, + oneof="destination", + message="DestinationGcsBucket", + ) + destination_parallelstore: "DestinationParallelstore" = proto.Field( + proto.MESSAGE, + number=10, + oneof="destination", + message="DestinationParallelstore", + ) + counters: "TransferCounters" = proto.Field( + proto.MESSAGE, + number=3, + message="TransferCounters", + ) + transfer_type: "TransferType" = proto.Field( + proto.ENUM, + number=6, + enum="TransferType", + ) + + +class TransferCounters(proto.Message): + r"""A collection of counters that report the progress of a + transfer operation. 
+ + Attributes: + objects_found (int): + Objects found in the data source that are + scheduled to be transferred, excluding any that + are filtered based on object conditions or + skipped due to sync. + bytes_found (int): + Bytes found in the data source that are + scheduled to be transferred, excluding any that + are filtered based on object conditions or + skipped due to sync. + objects_skipped (int): + Objects in the data source that are not + transferred because they already exist in the + data destination. + bytes_skipped (int): + Bytes in the data source that are not + transferred because they already exist in the + data destination. + objects_copied (int): + Objects that are copied to the data + destination. + bytes_copied (int): + Bytes that are copied to the data + destination. + """ + + objects_found: int = proto.Field( + proto.INT64, + number=1, + ) + bytes_found: int = proto.Field( + proto.INT64, + number=2, + ) + objects_skipped: int = proto.Field( + proto.INT64, + number=3, + ) + bytes_skipped: int = proto.Field( + proto.INT64, + number=4, + ) + objects_copied: int = proto.Field( + proto.INT64, + number=5, + ) + bytes_copied: int = proto.Field( + proto.INT64, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py index cfb13d90803a..af354b4aee21 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/async_client.py @@ -300,7 +300,7 @@ async def list_instances( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: - r"""Lists Instances in a given project and location. + r"""Lists all instances in a given project and location. .. code-block:: python @@ -331,16 +331,14 @@ async def sample_list_instances(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.ListInstancesRequest, dict]]): - The request object. Message for requesting list of - Instances + The request object. List instances request. parent (:class:`str`): Required. The project and location for which to retrieve instance information, in the format - ``projects/{project_id}/locations/{location}``. For - Parallelstore locations map to Google Cloud zones, for - example **us-central1-a**. To retrieve instance - information for all locations, use "-" for the - ``{location}`` value. + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -353,11 +351,11 @@ async def sample_list_instances(): Returns: google.cloud.parallelstore_v1beta.services.parallelstore.pagers.ListInstancesAsyncPager: - Message for response to listing - Instances - Iterating over this object will yield - results and resolve additional pages - automatically. + Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. 
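The v1 ``parallelstore.py`` module added above defines plain proto-plus messages, so a request can be assembled field by field before it is handed to a client. A minimal sketch, assuming the package built from this patch is installed; the project, location, and instance identifiers are placeholders:

from google.cloud.parallelstore_v1 import types

instance = types.Instance(
    description="scratch space for training jobs",
    # capacity_gib must be between 12000 and 100000, in multiples of 4000.
    capacity_gib=12000,
    file_stripe_level=types.FileStripeLevel.FILE_STRIPE_LEVEL_BALANCED,
    directory_stripe_level=types.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MAX,
)

request = types.CreateInstanceRequest(
    parent="projects/my-project/locations/us-central1-a",
    instance_id="my-instance",
    instance=instance,
)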
@@ -426,7 +424,7 @@ async def get_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> parallelstore.Instance: - r"""Gets details of a single Instance. + r"""Gets details of a single instance. .. code-block:: python @@ -456,7 +454,7 @@ async def sample_get_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.GetInstanceRequest, dict]]): - The request object. Request to get an instance's details. + The request object. Get an instance's details. name (:class:`str`): Required. The instance resource name, in the format ``projects/{project_id}/locations/{location}/instances/{instance_id}``. @@ -571,13 +569,12 @@ async def sample_create_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.CreateInstanceRequest, dict]]): - The request object. Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + The request object. Create a new Parallelstore instance. parent (:class:`str`): Required. The instance's project and location, in the format ``projects/{project}/locations/{location}``. - Locations map to Google Cloud zones, for example - **us-west1-b**. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -588,8 +585,7 @@ async def sample_create_instance(): on the ``request`` instance; if ``request`` is provided, this should not be set. instance_id (:class:`str`): - Required. The logical name of the Parallelstore instance - in the user project with the following restrictions: + Required. The name of the Parallelstore instance. - Must contain only lowercase letters, numbers, and hyphens. @@ -684,7 +680,7 @@ async def update_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single Instance. + r"""Updates the parameters of a single instance. .. code-block:: python @@ -721,14 +717,14 @@ async def sample_update_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.UpdateInstanceRequest, dict]]): - The request object. Message for updating a Instance + The request object. Update an instance. instance (:class:`google.cloud.parallelstore_v1beta.types.Instance`): - Required. The instance to update + Required. The instance to update. This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update .Field mask is used + Required. Mask of fields to update. Field mask is used to specify the fields to be overwritten in the Instance resource by the update. At least one path must be supplied in this field. The fields specified in the @@ -820,7 +816,7 @@ async def delete_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a single Instance. + r"""Deletes a single instance. .. code-block:: python @@ -854,7 +850,7 @@ async def sample_delete_instance(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.DeleteInstanceRequest, dict]]): - The request object. Message for deleting a Instance + The request object. Delete an instance. name (:class:`str`): Required. 
Name of the resource This corresponds to the ``name`` field @@ -944,8 +940,7 @@ async def import_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ImportData copies data from Cloud Storage to - Parallelstore. + r"""Copies data from Cloud Storage to Parallelstore. .. code-block:: python @@ -983,9 +978,8 @@ async def sample_import_data(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.ImportDataRequest, dict]]): - The request object. Message representing the request - importing data from parallelstore to - Cloud Storage. + The request object. Import data from Cloud Storage into a + Parallelstore instance. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -998,8 +992,8 @@ async def sample_import_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ImportDataResponse` - ImportDataResponse is the response returned from - ImportData rpc. + The response to a request to import data to + Parallelstore. """ # Create or coerce a protobuf request object. @@ -1050,8 +1044,7 @@ async def export_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ExportData copies data from Parallelstore to Cloud - Storage + r"""Copies data from Parallelstore to Cloud Storage. .. code-block:: python @@ -1089,9 +1082,8 @@ async def sample_export_data(): Args: request (Optional[Union[google.cloud.parallelstore_v1beta.types.ExportDataRequest, dict]]): - The request object. Message representing the request - exporting data from Cloud Storage to - parallelstore. + The request object. Export data from Parallelstore to + Cloud Storage. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1104,8 +1096,8 @@ async def sample_export_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ExportDataResponse` - ExportDataResponse is the response returned from - ExportData rpc + The response to a request to export data from + Parallelstore. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py index 1cb9552118a6..03db0bc0e5c8 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/client.py @@ -774,7 +774,7 @@ def list_instances( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: - r"""Lists Instances in a given project and location. + r"""Lists all instances in a given project and location. .. code-block:: python @@ -805,16 +805,14 @@ def sample_list_instances(): Args: request (Union[google.cloud.parallelstore_v1beta.types.ListInstancesRequest, dict]): - The request object. Message for requesting list of - Instances + The request object. List instances request. parent (str): Required. 
The project and location for which to retrieve instance information, in the format - ``projects/{project_id}/locations/{location}``. For - Parallelstore locations map to Google Cloud zones, for - example **us-central1-a**. To retrieve instance - information for all locations, use "-" for the - ``{location}`` value. + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use + "-" as the value of ``{location}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -827,11 +825,11 @@ def sample_list_instances(): Returns: google.cloud.parallelstore_v1beta.services.parallelstore.pagers.ListInstancesPager: - Message for response to listing - Instances - Iterating over this object will yield - results and resolve additional pages - automatically. + Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -897,7 +895,7 @@ def get_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> parallelstore.Instance: - r"""Gets details of a single Instance. + r"""Gets details of a single instance. .. code-block:: python @@ -927,7 +925,7 @@ def sample_get_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.GetInstanceRequest, dict]): - The request object. Request to get an instance's details. + The request object. Get an instance's details. name (str): Required. The instance resource name, in the format ``projects/{project_id}/locations/{location}/instances/{instance_id}``. @@ -1039,13 +1037,12 @@ def sample_create_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.CreateInstanceRequest, dict]): - The request object. Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + The request object. Create a new Parallelstore instance. parent (str): Required. The instance's project and location, in the format ``projects/{project}/locations/{location}``. - Locations map to Google Cloud zones, for example - **us-west1-b**. + Locations map to Google Cloud zones; for example, + ``us-west1-b``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1056,8 +1053,7 @@ def sample_create_instance(): on the ``request`` instance; if ``request`` is provided, this should not be set. instance_id (str): - Required. The logical name of the Parallelstore instance - in the user project with the following restrictions: + Required. The name of the Parallelstore instance. - Must contain only lowercase letters, numbers, and hyphens. @@ -1149,7 +1145,7 @@ def update_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates the parameters of a single Instance. + r"""Updates the parameters of a single instance. .. code-block:: python @@ -1186,14 +1182,14 @@ def sample_update_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.UpdateInstanceRequest, dict]): - The request object. Message for updating a Instance + The request object. Update an instance. instance (google.cloud.parallelstore_v1beta.types.Instance): - Required. The instance to update + Required. The instance to update. 
This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update .Field mask is used + Required. Mask of fields to update. Field mask is used to specify the fields to be overwritten in the Instance resource by the update. At least one path must be supplied in this field. The fields specified in the @@ -1282,7 +1278,7 @@ def delete_instance( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a single Instance. + r"""Deletes a single instance. .. code-block:: python @@ -1316,7 +1312,7 @@ def sample_delete_instance(): Args: request (Union[google.cloud.parallelstore_v1beta.types.DeleteInstanceRequest, dict]): - The request object. Message for deleting a Instance + The request object. Delete an instance. name (str): Required. Name of the resource This corresponds to the ``name`` field @@ -1403,8 +1399,7 @@ def import_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ImportData copies data from Cloud Storage to - Parallelstore. + r"""Copies data from Cloud Storage to Parallelstore. .. code-block:: python @@ -1442,9 +1437,8 @@ def sample_import_data(): Args: request (Union[google.cloud.parallelstore_v1beta.types.ImportDataRequest, dict]): - The request object. Message representing the request - importing data from parallelstore to - Cloud Storage. + The request object. Import data from Cloud Storage into a + Parallelstore instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1457,8 +1451,8 @@ def sample_import_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ImportDataResponse` - ImportDataResponse is the response returned from - ImportData rpc. + The response to a request to import data to + Parallelstore. """ # Create or coerce a protobuf request object. @@ -1507,8 +1501,7 @@ def export_data( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ExportData copies data from Parallelstore to Cloud - Storage + r"""Copies data from Parallelstore to Cloud Storage. .. code-block:: python @@ -1546,9 +1539,8 @@ def sample_export_data(): Args: request (Union[google.cloud.parallelstore_v1beta.types.ExportDataRequest, dict]): - The request object. Message representing the request - exporting data from Cloud Storage to - parallelstore. + The request object. Export data from Parallelstore to + Cloud Storage. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1561,8 +1553,8 @@ def sample_export_data(): The result type for the operation will be :class:`google.cloud.parallelstore_v1beta.types.ExportDataResponse` - ExportDataResponse is the response returned from - ExportData rpc + The response to a request to export data from + Parallelstore. """ # Create or coerce a protobuf request object. 
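Taken together, the reworded docstrings above describe the synchronous call surface: list instances with the ``-`` location wildcard, then start an import or export as a long-running operation. A minimal sketch, assuming Application Default Credentials are configured; the project, instance, and bucket names are placeholders:

from google.cloud import parallelstore_v1beta

client = parallelstore_v1beta.ParallelstoreClient()

# "-" as the location retrieves instance information for all locations.
for instance in client.list_instances(parent="projects/my-project/locations/-"):
    print(instance.name, instance.state)

# export_data returns a long-running operation; result() blocks until the
# transfer finishes and yields an ExportDataResponse.
operation = client.export_data(
    request=parallelstore_v1beta.ExportDataRequest(
        name="projects/my-project/locations/us-central1-a/instances/my-instance",
        source_parallelstore=parallelstore_v1beta.SourceParallelstore(path="/"),
        destination_gcs_bucket=parallelstore_v1beta.DestinationGcsBucket(
            uri="gs://my-bucket"
        ),
    )
)
response = operation.result()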
diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py index 4e6eb0b5ad05..a57745ff945c 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc.py @@ -279,7 +279,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists all instances in a given project and location. Returns: Callable[[~.ListInstancesRequest], @@ -305,7 +305,7 @@ def get_instance( ) -> Callable[[parallelstore.GetInstanceRequest], parallelstore.Instance]: r"""Return a callable for the get instance method over gRPC. - Gets details of a single Instance. + Gets details of a single instance. Returns: Callable[[~.GetInstanceRequest], @@ -358,7 +358,7 @@ def update_instance( ) -> Callable[[parallelstore.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. - Updates the parameters of a single Instance. + Updates the parameters of a single instance. Returns: Callable[[~.UpdateInstanceRequest], @@ -384,7 +384,7 @@ def delete_instance( ) -> Callable[[parallelstore.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. - Deletes a single Instance. + Deletes a single instance. Returns: Callable[[~.DeleteInstanceRequest], @@ -410,8 +410,7 @@ def import_data( ) -> Callable[[parallelstore.ImportDataRequest], operations_pb2.Operation]: r"""Return a callable for the import data method over gRPC. - ImportData copies data from Cloud Storage to - Parallelstore. + Copies data from Cloud Storage to Parallelstore. Returns: Callable[[~.ImportDataRequest], @@ -437,8 +436,7 @@ def export_data( ) -> Callable[[parallelstore.ExportDataRequest], operations_pb2.Operation]: r"""Return a callable for the export data method over gRPC. - ExportData copies data from Parallelstore to Cloud - Storage + Copies data from Parallelstore to Cloud Storage. Returns: Callable[[~.ExportDataRequest], diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py index 67196ef6b27f..9a243a8a7c83 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/grpc_asyncio.py @@ -286,7 +286,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists all instances in a given project and location. Returns: Callable[[~.ListInstancesRequest], @@ -314,7 +314,7 @@ def get_instance( ]: r"""Return a callable for the get instance method over gRPC. - Gets details of a single Instance. + Gets details of a single instance. Returns: Callable[[~.GetInstanceRequest], @@ -371,7 +371,7 @@ def update_instance( ]: r"""Return a callable for the update instance method over gRPC. - Updates the parameters of a single Instance. + Updates the parameters of a single instance. 
Returns: Callable[[~.UpdateInstanceRequest], @@ -399,7 +399,7 @@ def delete_instance( ]: r"""Return a callable for the delete instance method over gRPC. - Deletes a single Instance. + Deletes a single instance. Returns: Callable[[~.DeleteInstanceRequest], @@ -427,8 +427,7 @@ def import_data( ]: r"""Return a callable for the import data method over gRPC. - ImportData copies data from Cloud Storage to - Parallelstore. + Copies data from Cloud Storage to Parallelstore. Returns: Callable[[~.ImportDataRequest], @@ -456,8 +455,7 @@ def export_data( ]: r"""Return a callable for the export data method over gRPC. - ExportData copies data from Parallelstore to Cloud - Storage + Copies data from Parallelstore to Cloud Storage. Returns: Callable[[~.ExportDataRequest], diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py index 89a55da7e057..ff9e16346f08 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/services/parallelstore/transports/rest.py @@ -628,8 +628,7 @@ def __call__( Args: request (~.parallelstore.CreateInstanceRequest): - The request object. Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + The request object. Create a new Parallelstore instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -722,7 +721,7 @@ def __call__( Args: request (~.parallelstore.DeleteInstanceRequest): - The request object. Message for deleting a Instance + The request object. Delete an instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -808,9 +807,8 @@ def __call__( Args: request (~.parallelstore.ExportDataRequest): - The request object. Message representing the request - exporting data from Cloud Storage to - parallelstore. + The request object. Export data from Parallelstore to + Cloud Storage. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -903,7 +901,7 @@ def __call__( Args: request (~.parallelstore.GetInstanceRequest): - The request object. Request to get an instance's details. + The request object. Get an instance's details. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -988,9 +986,8 @@ def __call__( Args: request (~.parallelstore.ImportDataRequest): - The request object. Message representing the request - importing data from parallelstore to - Cloud Storage. + The request object. Import data from Cloud Storage into a + Parallelstore instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1083,8 +1080,7 @@ def __call__( Args: request (~.parallelstore.ListInstancesRequest): - The request object. Message for requesting list of - Instances + The request object. List instances request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
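The REST stubs whose docstrings are revised above are reached by asking the client for the ``rest`` transport explicitly; the call surface is otherwise the same as gRPC. A hedged sketch with placeholder resource names, assuming the v1beta package from this patch is installed:

from google.cloud import parallelstore_v1beta

# transport="rest" routes every call through the stubs in this file instead of gRPC.
client = parallelstore_v1beta.ParallelstoreClient(transport="rest")

instance = client.get_instance(
    name="projects/my-project/locations/us-central1-a/instances/my-instance"
)
print(instance.daos_version, instance.access_points)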
@@ -1093,8 +1089,8 @@ def __call__( Returns: ~.parallelstore.ListInstancesResponse: - Message for response to listing - Instances + Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. """ @@ -1173,7 +1169,7 @@ def __call__( Args: request (~.parallelstore.UpdateInstanceRequest): - The request object. Message for updating a Instance + The request object. Update an instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py index ea929d73e265..7d28780d668e 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1beta/types/parallelstore.py @@ -72,7 +72,8 @@ class FileStripeLevel(proto.Enum): Values: FILE_STRIPE_LEVEL_UNSPECIFIED (0): - Default file striping + If not set, FileStripeLevel will default to + FILE_STRIPE_LEVEL_BALANCED FILE_STRIPE_LEVEL_MIN (1): Minimum file striping FILE_STRIPE_LEVEL_BALANCED (2): @@ -91,7 +92,8 @@ class DirectoryStripeLevel(proto.Enum): Values: DIRECTORY_STRIPE_LEVEL_UNSPECIFIED (0): - Default directory striping + If not set, DirectoryStripeLevel will default to + DIRECTORY_STRIPE_LEVEL_MAX DIRECTORY_STRIPE_LEVEL_MIN (1): Minimum directory striping DIRECTORY_STRIPE_LEVEL_BALANCED (2): @@ -111,7 +113,7 @@ class Instance(proto.Message): Attributes: name (str): Identifier. The resource name of the instance, in the format - ``projects/{project}/locations/{location}/instances/{instance_id}`` + ``projects/{project}/locations/{location}/instances/{instance_id}``. description (str): Optional. The description of the instance. 2048 characters or less. @@ -124,76 +126,62 @@ class Instance(proto.Message): Output only. The time when the instance was updated. labels (MutableMapping[str, str]): - Optional. Cloud Labels are a flexible and lightweight - mechanism for organizing cloud resources into groups that - reflect a customer's organizational needs and deployment - strategies. Cloud Labels can be used to filter collections - of resources. They can be used to control how resource - metrics are aggregated. And they can be used as arguments to - policy management rules (e.g. route, firewall, load - balancing, etc.). - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z][a-z0-9_-]{0,62}``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``[a-z0-9_-]{0,63}``. - - No more than 64 labels can be associated with a given - resource. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - - If you plan to use labels in your own code, please note that - additional characters may be allowed in the future. - Therefore, you are advised to use an internal label - representation, such as JSON, which doesn't rely upon - specific characters being disallowed. For example, - representing labels as the string: name + "*" + value would - prove problematic if we were to allow "*" in a future - release. + Optional. Cloud Labels are a flexible and + lightweight mechanism for organizing cloud + resources into groups that reflect a customer's + organizational needs and deployment strategies. 
+ See + https://cloud.google.com/resource-manager/docs/labels-overview + for details. capacity_gib (int): - Required. Immutable. Storage capacity of - Parallelstore instance in Gibibytes (GiB). + Required. Immutable. The instance's storage + capacity in Gibibytes (GiB). Allowed values are + between 12000 and 100000, in multiples of 4000; + e.g., 12000, 16000, 20000, ... daos_version (str): Output only. The version of DAOS software - running in the instance + running in the instance. access_points (MutableSequence[str]): - Output only. List of access_points. Contains a list of IPv4 - addresses used for client side configuration. + Output only. A list of IPv4 addresses used + for client side configuration. network (str): - Optional. Immutable. The name of the Google Compute Engine - `VPC network `__ to - which the instance is connected. + Optional. Immutable. The name of the Compute Engine `VPC + network `__ to which + the instance is connected. reserved_ip_range (str): - Optional. Immutable. Contains the id of the - allocated IP address range associated with the - private service access connection for example, - "test-default" associated with IP range - 10.0.0.0/29. If no range id is provided all - ranges will be considered. + Optional. Immutable. The ID of the IP address range being + used by the instance's VPC network. See `Configure a VPC + network `__. + If no ID is provided, all ranges are considered. effective_reserved_ip_range (str): - Output only. Immutable. Contains the id of - the allocated IP address range associated with - the private service access connection for - example, "test-default" associated with IP range - 10.0.0.0/29. This field is populated by the - service and and contains the value currently - used by the service. + Output only. Immutable. The ID of the IP + address range being used by the instance's VPC + network. This field is populated by the service + and contains the value currently used by the + service. file_stripe_level (google.cloud.parallelstore_v1beta.types.FileStripeLevel): - Optional. Stripe level for files. - MIN better suited for small size files. - MAX higher throughput performance for larger - files. + Optional. Stripe level for files. Allowed values are: + + - ``FILE_STRIPE_LEVEL_MIN``: offers the best performance + for small size files. + - ``FILE_STRIPE_LEVEL_BALANCED``: balances performance for + workloads involving a mix of small and large files. + - ``FILE_STRIPE_LEVEL_MAX``: higher throughput performance + for larger files. directory_stripe_level (google.cloud.parallelstore_v1beta.types.DirectoryStripeLevel): - Optional. Stripe level for directories. - MIN when directory has a small number of files. - MAX when directory has a large number of files. + Optional. Stripe level for directories. Allowed values are: + + - ``DIRECTORY_STRIPE_LEVEL_MIN``: recommended when + directories contain a small number of files. + - ``DIRECTORY_STRIPE_LEVEL_BALANCED``: balances performance + for workloads involving a mix of small and large + directories. + - ``DIRECTORY_STRIPE_LEVEL_MAX``: recommended for + directories with a large number of files. """ class State(proto.Enum): - r"""Represents the different states of a Parallelstore instance. + r"""The possible states of a Parallelstore instance. Values: STATE_UNSPECIFIED (0): @@ -206,12 +194,15 @@ class State(proto.Enum): The instance is being deleted. FAILED (4): The instance is not usable. + UPGRADING (5): + The instance is being upgraded. 
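As a rough illustration of how the Instance fields documented above fit together, the following sketch (not part of the generated patch) builds an Instance message with the v1beta types; the description string is a hypothetical placeholder, and the capacity and stripe levels simply follow the constraints stated in the docstrings (capacity_gib between 12000 and 100000, in multiples of 4000).

from google.cloud import parallelstore_v1beta

# Illustrative values only: capacity_gib must be a multiple of 4000
# between 12000 and 100000, per the field documentation above.
instance = parallelstore_v1beta.Instance(
    description="scratch space for training jobs",  # hypothetical
    capacity_gib=16000,
    file_stripe_level=parallelstore_v1beta.FileStripeLevel.FILE_STRIPE_LEVEL_BALANCED,
    directory_stripe_level=parallelstore_v1beta.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MAX,
)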
""" STATE_UNSPECIFIED = 0 CREATING = 1 ACTIVE = 2 DELETING = 3 FAILED = 4 + UPGRADING = 5 name: str = proto.Field( proto.STRING, @@ -278,28 +269,28 @@ class State(proto.Enum): class ListInstancesRequest(proto.Message): - r"""Message for requesting list of Instances + r"""List instances request. Attributes: parent (str): Required. The project and location for which to retrieve instance information, in the format - ``projects/{project_id}/locations/{location}``. For - Parallelstore locations map to Google Cloud zones, for - example **us-central1-a**. To retrieve instance information - for all locations, use "-" for the ``{location}`` value. + ``projects/{project_id}/locations/{location}``. + + To retrieve instance information for all locations, use "-" + as the value of ``{location}``. page_size (int): Optional. Requested page size. Server may return fewer items than requested. If - unspecified, server will pick an appropriate + unspecified, the server will pick an appropriate default. page_token (str): Optional. A token identifying a page of results the server should return. filter (str): - Optional. Filtering results + Optional. Filtering results. order_by (str): - Optional. Hint for how to order the results + Optional. Hint for how to order the results. """ parent: str = proto.Field( @@ -325,11 +316,12 @@ class ListInstancesRequest(proto.Message): class ListInstancesResponse(proto.Message): - r"""Message for response to listing Instances + r"""Response from + [ListInstances][google.cloud.parallelstore.v1beta.Parallelstore.ListInstances]. Attributes: instances (MutableSequence[google.cloud.parallelstore_v1beta.types.Instance]): - The list of Parallelstore Instances + The list of Parallelstore instances. next_page_token (str): A token identifying a page of results the server should return. @@ -357,7 +349,7 @@ def raw_page(self): class GetInstanceRequest(proto.Message): - r"""Request to get an instance's details. + r"""Get an instance's details. Attributes: name (str): @@ -372,17 +364,15 @@ class GetInstanceRequest(proto.Message): class CreateInstanceRequest(proto.Message): - r"""Request for - [CreateInstance][google.cloud.parallelstore.v1beta.Parallelstore.CreateInstance] + r"""Create a new Parallelstore instance. Attributes: parent (str): Required. The instance's project and location, in the format ``projects/{project}/locations/{location}``. Locations map - to Google Cloud zones, for example **us-west1-b**. + to Google Cloud zones; for example, ``us-west1-b``. instance_id (str): - Required. The logical name of the Parallelstore instance in - the user project with the following restrictions: + Required. The name of the Parallelstore instance. - Must contain only lowercase letters, numbers, and hyphens. @@ -434,17 +424,17 @@ class CreateInstanceRequest(proto.Message): class UpdateInstanceRequest(proto.Message): - r"""Message for updating a Instance + r"""Update an instance. Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update .Field mask is used to + Required. Mask of fields to update. Field mask is used to specify the fields to be overwritten in the Instance resource by the update. At least one path must be supplied in this field. The fields specified in the update_mask are relative to the resource, not the full request. instance (google.cloud.parallelstore_v1beta.types.Instance): - Required. The instance to update + Required. The instance to update. request_id (str): Optional. An optional request ID to identify requests. 
Specify a unique request ID so that if @@ -484,7 +474,7 @@ class UpdateInstanceRequest(proto.Message): class DeleteInstanceRequest(proto.Message): - r"""Message for deleting a Instance + r"""Delete an instance. Attributes: name (str): @@ -522,7 +512,7 @@ class DeleteInstanceRequest(proto.Message): class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. + r"""Long-running operation metadata. Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -584,12 +574,13 @@ class OperationMetadata(proto.Message): class SourceGcsBucket(proto.Message): - r"""Google Cloud Storage as a source. + r"""Cloud Storage as the source of a data transfer. Attributes: uri (str): - Required. URI to a Cloud Storage object in format: - 'gs:///'. + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. """ uri: str = proto.Field( @@ -599,12 +590,13 @@ class SourceGcsBucket(proto.Message): class DestinationGcsBucket(proto.Message): - r"""Google Cloud Storage as a destination. + r"""Cloud Storage as the destination of a data transfer. Attributes: uri (str): - Required. URI to a Cloud Storage object in format: - 'gs:///'. + Required. URI to a Cloud Storage bucket in the format: + ``gs:///``. The path inside + the bucket is optional. """ uri: str = proto.Field( @@ -614,13 +606,12 @@ class DestinationGcsBucket(proto.Message): class SourceParallelstore(proto.Message): - r"""Pa as a source. + r"""Parallelstore as the source of a data transfer. Attributes: path (str): - Optional. Root directory path to the - Paralellstore filesystem, starting with '/'. - Defaults to '/' if unset. + Optional. Root directory path to the Paralellstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. """ path: str = proto.Field( @@ -630,13 +621,12 @@ class SourceParallelstore(proto.Message): class DestinationParallelstore(proto.Message): - r"""Parallelstore as a destination. + r"""Parallelstore as the destination of a data transfer. Attributes: path (str): - Optional. Root directory path to the - Paralellstore filesystem, starting with '/'. - Defaults to '/' if unset. + Optional. Root directory path to the Paralellstore + filesystem, starting with ``/``. Defaults to ``/`` if unset. """ path: str = proto.Field( @@ -646,15 +636,14 @@ class DestinationParallelstore(proto.Message): class ImportDataRequest(proto.Message): - r"""Message representing the request importing data from - parallelstore to Cloud Storage. - + r"""Import data from Cloud Storage into a Parallelstore instance. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: source_gcs_bucket (google.cloud.parallelstore_v1beta.types.SourceGcsBucket): - Cloud Storage source. + The Cloud Storage source bucket and, + optionally, path inside the bucket. This field is a member of `oneof`_ ``source``. destination_parallelstore (google.cloud.parallelstore_v1beta.types.DestinationParallelstore): @@ -684,11 +673,17 @@ class ImportDataRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). service_account (str): - Optional. User-specified Service Account (SA) credentials to - be used when performing the transfer. Format: - ``projects/{project_id}/serviceAccounts/{service_account}`` + Optional. User-specified service account credentials to be + used when performing the transfer. 
+ + Use one of the following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + If unspecified, the Parallelstore service agent is used: - service-@gcp-sa-parallelstore.iam.gserviceaccount.com) + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` """ source_gcs_bucket: "SourceGcsBucket" = proto.Field( @@ -718,9 +713,7 @@ class ImportDataRequest(proto.Message): class ExportDataRequest(proto.Message): - r"""Message representing the request exporting data from Cloud - Storage to parallelstore. - + r"""Export data from Parallelstore to Cloud Storage. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -757,10 +750,15 @@ class ExportDataRequest(proto.Message): (00000000-0000-0000-0000-000000000000). service_account (str): Optional. User-specified Service Account (SA) credentials to - be used when performing the transfer. Format: - ``projects/{project_id}/serviceAccounts/{service_account}`` + be used when performing the transfer. Use one of the + following formats: + + - ``{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + - ``projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}`` + If unspecified, the Parallelstore service agent is used: - service-@gcp-sa-parallelstore.iam.gserviceaccount.com) + ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` """ source_parallelstore: "SourceParallelstore" = proto.Field( @@ -790,19 +788,15 @@ class ExportDataRequest(proto.Message): class ImportDataResponse(proto.Message): - r"""ImportDataResponse is the response returned from ImportData - rpc. - - """ + r"""The response to a request to import data to Parallelstore.""" class ImportDataMetadata(proto.Message): - r"""ImportDataMetadata contains import data operation metadata + r"""Metadata related to the data import operation. Attributes: operation_metadata (google.cloud.parallelstore_v1beta.types.TransferOperationMetadata): - Contains the data transfer operation - metadata. + Data transfer operation metadata. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the operation was created. @@ -867,19 +861,15 @@ class ImportDataMetadata(proto.Message): class ExportDataResponse(proto.Message): - r"""ExportDataResponse is the response returned from ExportData - rpc - - """ + r"""The response to a request to export data from Parallelstore.""" class ExportDataMetadata(proto.Message): - r"""ExportDataMetadata contains export data operation metadata + r"""Metadata related to the data export operation. Attributes: operation_metadata (google.cloud.parallelstore_v1beta.types.TransferOperationMetadata): - Contains the data transfer operation - metadata. + Data transfer operation metadata. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the operation was created. @@ -944,7 +934,7 @@ class ExportDataMetadata(proto.Message): class TransferOperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. + r"""Long-running operation metadata related to a data transfer. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -971,8 +961,8 @@ class TransferOperationMetadata(proto.Message): This field is a member of `oneof`_ ``destination``. 
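Because the service_account formats and the source/destination oneofs above are spread across several attribute entries, here is a minimal end-to-end sketch (not part of the patch; the project, instance, bucket, and service-account names are hypothetical) of an import request that pins an explicit service account using one of the formats listed above.

from google.cloud import parallelstore_v1beta

client = parallelstore_v1beta.ParallelstoreClient()

# All resource names below are hypothetical placeholders.
request = parallelstore_v1beta.ImportDataRequest(
    name="projects/my-project/locations/us-central1-a/instances/my-instance",
    source_gcs_bucket=parallelstore_v1beta.SourceGcsBucket(
        uri="gs://my-bucket/training-data",
    ),
    destination_parallelstore=parallelstore_v1beta.DestinationParallelstore(
        path="/",  # optional; defaults to "/" when unset
    ),
    # Any of the documented formats is accepted; a bare email address is shown.
    service_account="data-mover@my-project.iam.gserviceaccount.com",
)
operation = client.import_data(request=request)
response = operation.result()  # waits for the long-running operation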
counters (google.cloud.parallelstore_v1beta.types.TransferCounters): - Output only. Information about the progress - of the transfer operation. + Output only. The progress of the transfer + operation. transfer_type (google.cloud.parallelstore_v1beta.types.TransferType): Output only. The type of transfer occurring. """ diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py new file mode 100644 index 000000000000..c217c4b6b3f0 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_CreateInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py new file mode 100644 index 000000000000..5eb6bc11cbbe --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_create_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_create_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_CreateInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py new file mode 100644 index 000000000000..2adef8a7466c --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_delete_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_DeleteInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py new file mode 100644 index 000000000000..50f83b7f75f5 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_delete_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_delete_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_DeleteInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py new file mode 100644 index 000000000000..6667f548aeaa --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ExportData_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_export_data(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket() + destination_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ExportDataRequest( + destination_gcs_bucket=destination_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ExportData_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py new file mode 100644 index 000000000000..aa13d529c028 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_export_data_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ExportData_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_export_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + destination_gcs_bucket = parallelstore_v1.DestinationGcsBucket() + destination_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ExportDataRequest( + destination_gcs_bucket=destination_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.export_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ExportData_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py new file mode 100644 index 000000000000..1062bd309c96 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_GetInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py new file mode 100644 index 000000000000..eddbfa698810 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_get_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_GetInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py new file mode 100644 index 000000000000..7d8d7a3a062f --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ImportData_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_import_data(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + source_gcs_bucket = parallelstore_v1.SourceGcsBucket() + source_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ImportDataRequest( + source_gcs_bucket=source_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ImportData_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py new file mode 100644 index 000000000000..8e1753b883a9 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_import_data_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ImportData_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_import_data(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + source_gcs_bucket = parallelstore_v1.SourceGcsBucket() + source_gcs_bucket.uri = "uri_value" + + request = parallelstore_v1.ImportDataRequest( + source_gcs_bucket=source_gcs_bucket, + name="name_value", + ) + + # Make the request + operation = client.import_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ImportData_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py new file mode 100644 index 000000000000..8eca19544f79 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ListInstances_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py new file mode 100644 index 000000000000..38ed25cccec7 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_list_instances_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_list_instances(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + request = parallelstore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END parallelstore_v1_generated_Parallelstore_ListInstances_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py new file mode 100644 index 000000000000..150644e36304 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +async def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreAsyncClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_UpdateInstance_async] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py new file mode 100644 index 000000000000..c8283d173411 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/parallelstore_v1_generated_parallelstore_update_instance_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-parallelstore + + +# [START parallelstore_v1_generated_Parallelstore_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import parallelstore_v1 + + +def sample_update_instance(): + # Create a client + client = parallelstore_v1.ParallelstoreClient() + + # Initialize request argument(s) + instance = parallelstore_v1.Instance() + instance.capacity_gib = 1247 + + request = parallelstore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END parallelstore_v1_generated_Parallelstore_UpdateInstance_sync] diff --git a/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json new file mode 100644 index 000000000000..aec0635b88a7 --- /dev/null +++ b/packages/google-cloud-parallelstore/samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json @@ -0,0 +1,1150 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.parallelstore.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-parallelstore", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.create_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.CreateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "parallelstore_v1_generated_parallelstore_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_CreateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": 
"google.cloud.parallelstore_v1.ParallelstoreClient.create_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.CreateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "parallelstore_v1_generated_parallelstore_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_CreateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.delete_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.DeleteInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "parallelstore_v1_generated_parallelstore_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_DeleteInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": 
"google.cloud.parallelstore_v1.ParallelstoreClient.delete_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.DeleteInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "parallelstore_v1_generated_parallelstore_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.export_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ExportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ExportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ExportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_data" + }, + "description": "Sample for ExportData", + "file": "parallelstore_v1_generated_parallelstore_export_data_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ExportData_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_export_data_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.export_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ExportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + 
"shortName": "Parallelstore" + }, + "shortName": "ExportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ExportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_data" + }, + "description": "Sample for ExportData", + "file": "parallelstore_v1_generated_parallelstore_export_data_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ExportData_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_export_data_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.get_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.GetInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.parallelstore_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "parallelstore_v1_generated_parallelstore_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.get_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.GetInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.parallelstore_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "parallelstore_v1_generated_parallelstore_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.import_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ImportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ImportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ImportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_data" + }, + "description": "Sample for ImportData", + "file": "parallelstore_v1_generated_parallelstore_import_data_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ImportData_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_import_data_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.import_data", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ImportData", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ImportData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ImportDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_data" + }, + "description": "Sample for ImportData", + "file": 
"parallelstore_v1_generated_parallelstore_import_data_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ImportData_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_import_data_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.list_instances", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ListInstances", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "parallelstore_v1_generated_parallelstore_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.list_instances", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.ListInstances", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.parallelstore_v1.services.parallelstore.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "parallelstore_v1_generated_parallelstore_list_instances_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient", + "shortName": "ParallelstoreAsyncClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreAsyncClient.update_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.UpdateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "parallelstore_v1_generated_parallelstore_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_UpdateInstance_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient", + "shortName": "ParallelstoreClient" + }, + "fullName": "google.cloud.parallelstore_v1.ParallelstoreClient.update_instance", + "method": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore.UpdateInstance", + "service": { + "fullName": "google.cloud.parallelstore.v1.Parallelstore", + "shortName": "Parallelstore" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.parallelstore_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.parallelstore_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for 
UpdateInstance", + "file": "parallelstore_v1_generated_parallelstore_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "parallelstore_v1_generated_Parallelstore_UpdateInstance_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "parallelstore_v1_generated_parallelstore_update_instance_sync.py" + } + ] +} diff --git a/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py b/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py new file mode 100644 index 000000000000..7808e4ae2c7d --- /dev/null +++ b/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class parallelstoreCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', ), + 'delete_instance': ('name', 'request_id', ), + 'export_data': ('name', 'source_parallelstore', 'destination_gcs_bucket', 'request_id', 'service_account', ), + 'get_instance': ('name', ), + 'import_data': ('name', 'source_gcs_bucket', 'destination_parallelstore', 'request_id', 'service_account', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=parallelstoreCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the parallelstore client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
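+
+    Illustrative usage (directory paths are placeholders):
+        python3 fixup_parallelstore_v1_keywords.py -d ./my_input_dir -o ./my_output_dir
+
+    Illustrative rewrite performed on a single call (the resource name is a placeholder):
+        before: client.get_instance("projects/p/locations/us-central1/instances/i")
+        after:  client.get_instance(request={'name': "projects/p/locations/us-central1/instances/i"})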
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py new file mode 100644 index 000000000000..8f1b10a1e2e9 --- /dev/null +++ b/packages/google-cloud-parallelstore/tests/unit/gapic/parallelstore_v1/test_parallelstore.py @@ -0,0 +1,8278 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.parallelstore_v1.services.parallelstore import ( + ParallelstoreAsyncClient, + ParallelstoreClient, + pagers, + transports, +) +from google.cloud.parallelstore_v1.types import parallelstore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
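+# The replacement template below intentionally keeps the {UNIVERSE_DOMAIN}
+# placeholder so that universe-domain tests can still .format() it into a
+# concrete hostname.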
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ParallelstoreClient._get_default_mtls_endpoint(None) is None + assert ( + ParallelstoreClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ParallelstoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert ParallelstoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ParallelstoreClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ParallelstoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ParallelstoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ParallelstoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ParallelstoreClient._get_client_cert_source(None, False) is None + assert ( + ParallelstoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ParallelstoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + 
return_value=mock_default_cert_source, + ): + assert ( + ParallelstoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ParallelstoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + default_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ParallelstoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ParallelstoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, default_universe, "always") + == ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ParallelstoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ParallelstoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ParallelstoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ParallelstoreClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ParallelstoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ParallelstoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ParallelstoreClient._get_universe_domain(None, None) + == ParallelstoreClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ParallelstoreClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc"), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
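+    # (The repeated call presumably exercises the short-circuit path for an
+    # already-validated universe instead of re-running the comparison.)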
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ParallelstoreClient, "grpc"), + (ParallelstoreAsyncClient, "grpc_asyncio"), + (ParallelstoreClient, "rest"), + ], +) +def test_parallelstore_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "parallelstore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ParallelstoreGrpcTransport, "grpc"), + (transports.ParallelstoreGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ParallelstoreRestTransport, "rest"), + ], +) +def test_parallelstore_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ParallelstoreClient, "grpc"), + (ParallelstoreAsyncClient, "grpc_asyncio"), + (ParallelstoreClient, "rest"), + ], +) +def test_parallelstore_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "parallelstore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com/" + ) + + +def test_parallelstore_client_get_transport_class(): + transport = ParallelstoreClient.get_transport_class() + available_transports = [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreRestTransport, + ] + assert transport in available_transports + + transport = ParallelstoreClient.get_transport_class("grpc") + assert transport == transports.ParallelstoreGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc"), + ( + 
ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest"), + ], +) +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +def test_parallelstore_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ParallelstoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ParallelstoreClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc", "true"), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc", "false"), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest", "true"), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_parallelstore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ParallelstoreClient, ParallelstoreAsyncClient] +) +@mock.patch.object( + ParallelstoreClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ParallelstoreAsyncClient), +) +def test_parallelstore_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
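+    # (An explicitly supplied api_endpoint is expected to be returned as-is,
+    # with the provided cert source passed through unchanged.)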
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ParallelstoreClient, ParallelstoreAsyncClient] +) +@mock.patch.object( + ParallelstoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreClient), +) +@mock.patch.object( + ParallelstoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ParallelstoreAsyncClient), +) +def test_parallelstore_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ParallelstoreClient._DEFAULT_UNIVERSE + default_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ParallelstoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
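+    # (ClientOptions.universe_domain only exists on newer google-api-core
+    # releases, which is presumably why the hasattr() guard below falls back
+    # to the default universe when the attribute is missing.)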
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport, "grpc"), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest"), + ], +) +def test_parallelstore_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ParallelstoreClient, + transports.ParallelstoreGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ParallelstoreClient, transports.ParallelstoreRestTransport, "rest", None), + ], +) +def test_parallelstore_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_parallelstore_client_client_options_from_dict(): + with mock.patch( + "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ParallelstoreClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ParallelstoreClient, + transports.ParallelstoreGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ParallelstoreAsyncClient, + transports.ParallelstoreGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_parallelstore_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
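+    # create_channel should receive the credentials loaded from the file (file_creds),
+    # not the application default credentials returned by google.auth.default.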
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "parallelstore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="parallelstore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ListInstancesRequest, + dict, + ], +) +def test_list_instances(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ListInstancesRequest() + + +def test_list_instances_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = parallelstore.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_instances(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_instances_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_instances_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. 
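+        # The async client awaits the stub, so the response is wrapped in an awaitable
+        # FakeUnaryUnaryCall rather than returned directly.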
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_instances + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_instances + ] = mock_rpc + + request = {} + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=parallelstore.ListInstancesRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = parallelstore.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse() + ) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_instances_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_instances_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + parallelstore.ListInstancesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.ListInstancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instances( + parallelstore.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_pager(transport_name: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, parallelstore.Instance) for i in results) + + +def test_list_instances_pages(transport_name: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
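+        # Each mocked call returns the next page; the trailing RuntimeError acts as a
+        # sentinel so any attempt to read past the final page fails loudly.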
+ call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, parallelstore.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.GetInstanceRequest, + dict, + ], +) +def test_get_instance(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, parallelstore.Instance) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == parallelstore.Instance.State.CREATING + assert response.capacity_gib == 1247 + assert response.daos_version == "daos_version_value" + assert response.access_points == ["access_points_value"] + assert response.network == "network_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.effective_reserved_ip_range == "effective_reserved_ip_range_value" + assert ( + response.file_stripe_level + == parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN + ) + assert ( + response.directory_stripe_level + == parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN + ) + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.GetInstanceRequest() + + +def test_get_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.GetInstanceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.GetInstanceRequest( + name="name_value", + ) + + +def test_get_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + ) + response = await client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_instance + ] = mock_rpc + + request = {} + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.GetInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + ) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, parallelstore.Instance) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == parallelstore.Instance.State.CREATING + assert response.capacity_gib == 1247 + assert response.daos_version == "daos_version_value" + assert response.access_points == ["access_points_value"] + assert response.network == "network_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.effective_reserved_ip_range == "effective_reserved_ip_range_value" + assert ( + response.file_stripe_level + == parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN + ) + assert ( + response.directory_stripe_level + == parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN + ) + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = parallelstore.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance() + ) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_instance_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + parallelstore.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = parallelstore.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + parallelstore.Instance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instance( + parallelstore.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.CreateInstanceRequest, + dict, + ], +) +def test_create_instance(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.CreateInstanceRequest() + + +def test_create_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + request_id="request_id_value", + ) + + +def test_create_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_instance + ] = mock_rpc + + request = {} + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.CreateInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
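+        # create_instance is a long-running operation, so the stub returns an Operation
+        # wrapped in an awaitable FakeUnaryUnaryCall and the client yields a future.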
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = parallelstore.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_instance_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + parallelstore.CreateInstanceRequest(), + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instance( + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = parallelstore.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + parallelstore.CreateInstanceRequest(), + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.UpdateInstanceRequest() + + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.UpdateInstanceRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.UpdateInstanceRequest( + request_id="request_id_value", + ) + + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.UpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_instance + ] = mock_rpc + + request = {} + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.UpdateInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +def test_update_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].instance
+    mock_val = parallelstore.Instance(name="name_value")
+    assert arg == mock_val
+    arg = args[0].update_mask
+    mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+    assert arg == mock_val
+
+
+def test_update_instance_flattened_error():
+    client = ParallelstoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_instance(
+            parallelstore.UpdateInstanceRequest(),
+            instance=parallelstore.Instance(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_async():
+    client = ParallelstoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_instance(
+            instance=parallelstore.Instance(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance
+        mock_val = parallelstore.Instance(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_error_async():
+    client = ParallelstoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_instance(
+            parallelstore.UpdateInstanceRequest(),
+            instance=parallelstore.Instance(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        parallelstore.DeleteInstanceRequest,
+        dict,
+    ],
+)
+def test_delete_instance(request_type, transport: str = "grpc"):
+    client = ParallelstoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.delete_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = parallelstore.DeleteInstanceRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.DeleteInstanceRequest() + + +def test_delete_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.DeleteInstanceRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.DeleteInstanceRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_instance + ] = mock_rpc + + request = {} + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=parallelstore.DeleteInstanceRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.DeleteInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_instance_flattened_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + parallelstore.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_instance(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_error_async():
+    client = ParallelstoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_instance(
+            parallelstore.DeleteInstanceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        parallelstore.ImportDataRequest,
+        dict,
+    ],
+)
+def test_import_data(request_type, transport: str = "grpc"):
+    client = ParallelstoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.import_data), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.import_data(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = parallelstore.ImportDataRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_import_data_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ParallelstoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.import_data), "__call__") as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client.import_data()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == parallelstore.ImportDataRequest()
+
+
+def test_import_data_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = ParallelstoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+ request = parallelstore.ImportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.import_data(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ImportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + +def test_import_data_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_data] = mock_rpc + request = {} + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_import_data_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ImportDataRequest() + + +@pytest.mark.asyncio +async def test_import_data_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.import_data + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.import_data + ] = mock_rpc + + request = {} + await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_import_data_async( + transport: str = "grpc_asyncio", request_type=parallelstore.ImportDataRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.ImportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_data_async_from_dict(): + await test_import_data_async(request_type=dict) + + +def test_import_data_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ImportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_data_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ImportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_data), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ExportDataRequest, + dict, + ], +) +def test_export_data(request_type, transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = parallelstore.ExportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_data_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.export_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ExportDataRequest() + + +def test_export_data_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = parallelstore.ExportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.export_data(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ExportDataRequest( + name="name_value", + service_account="service_account_value", + ) + + +def test_export_data_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_data] = mock_rpc + request = {} + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_export_data_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_data() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == parallelstore.ExportDataRequest() + + +@pytest.mark.asyncio +async def test_export_data_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.export_data + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.export_data + ] = mock_rpc + + request = {} + await client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_export_data_async( + transport: str = "grpc_asyncio", request_type=parallelstore.ExportDataRequest +): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = parallelstore.ExportDataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_data_async_from_dict(): + await test_export_data_async(request_type=dict) + + +def test_export_data_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ExportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_data_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = parallelstore.ExportDataRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_data), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = parallelstore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instances_rest_required_fields( + request_type=parallelstore.ListInstancesRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = parallelstore.ListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = parallelstore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.ListInstancesRequest.pb( + parallelstore.ListInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = parallelstore.ListInstancesResponse.to_json( + parallelstore.ListInstancesResponse() + ) + + request = parallelstore.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = parallelstore.ListInstancesResponse() + + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=parallelstore.ListInstancesRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = parallelstore.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + parallelstore.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_rest_pager(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + parallelstore.Instance(), + ], + next_page_token="abc", + ), + parallelstore.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + ], + next_page_token="ghi", + ), + parallelstore.ListInstancesResponse( + instances=[ + parallelstore.Instance(), + parallelstore.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + parallelstore.ListInstancesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, parallelstore.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = parallelstore.Instance( + name="name_value", + description="description_value", + state=parallelstore.Instance.State.CREATING, + capacity_gib=1247, + daos_version="daos_version_value", + access_points=["access_points_value"], + network="network_value", + reserved_ip_range="reserved_ip_range_value", + effective_reserved_ip_range="effective_reserved_ip_range_value", + file_stripe_level=parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN, + directory_stripe_level=parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, parallelstore.Instance) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == parallelstore.Instance.State.CREATING + assert response.capacity_gib == 1247 + assert response.daos_version == "daos_version_value" + assert response.access_points == ["access_points_value"] + assert response.network == "network_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.effective_reserved_ip_range == "effective_reserved_ip_range_value" + assert ( + response.file_stripe_level + == parallelstore.FileStripeLevel.FILE_STRIPE_LEVEL_MIN + ) + assert ( + response.directory_stripe_level + == parallelstore.DirectoryStripeLevel.DIRECTORY_STRIPE_LEVEL_MIN + ) + + +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_rest_required_fields( + request_type=parallelstore.GetInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = parallelstore.Instance() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = parallelstore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.GetInstanceRequest.pb( + parallelstore.GetInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = parallelstore.Instance.to_json( + parallelstore.Instance() + ) + + request = parallelstore.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = parallelstore.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.GetInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = parallelstore.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = parallelstore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + parallelstore.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "description": "description_value", + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "capacity_gib": 1247, + "daos_version": "daos_version_value", + "access_points": ["access_points_value1", "access_points_value2"], + "network": "network_value", + "reserved_ip_range": "reserved_ip_range_value", + "effective_reserved_ip_range": "effective_reserved_ip_range_value", + "file_stripe_level": 1, + "directory_stripe_level": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = parallelstore.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. 
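+ # create_instance is a long-running operation: the client wraps the raw
+ # operations_pb2.Operation in a google.api_core.operation.Operation future,
+ # so the underlying proto is reached via `response.operation` below.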
+ assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_rest_required_fields( + request_type=parallelstore.CreateInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
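+ # instance_id is a required query parameter whose proto default is the empty string,
+ # so the expected_params assertion at the end of this block checks that "instanceId"
+ # is still sent (as "") alongside the implicit "$alt" parameter.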
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.CreateInstanceRequest.pb( + parallelstore.CreateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.CreateInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) + + +def test_create_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + parallelstore.CreateInstanceRequest(), + parent="parent_value", + instance=parallelstore.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "description": "description_value", + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "capacity_gib": 1247, + "daos_version": "daos_version_value", + "access_points": ["access_points_value1", "access_points_value2"], + "network": "network_value", + "reserved_ip_range": "reserved_ip_range_value", + "effective_reserved_ip_range": "effective_reserved_ip_range_value", + "file_stripe_level": 1, + "directory_stripe_level": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = parallelstore.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_rest_required_fields( + request_type=parallelstore.UpdateInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
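+ # transcode() is stubbed with a fixed URI and method so the call bypasses the real
+ # path templates; only the query parameters handed to the session are inspected afterwards.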
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.UpdateInstanceRequest.pb( + parallelstore.UpdateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.UpdateInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + parallelstore.UpdateInstanceRequest(), + instance=parallelstore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_rest_required_fields( + request_type=parallelstore.DeleteInstanceRequest, +): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.DeleteInstanceRequest.pb( + parallelstore.DeleteInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=parallelstore.DeleteInstanceRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) + + +def test_delete_instance_rest_flattened_error(transport: str = "rest"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + parallelstore.DeleteInstanceRequest(), + name="name_value", + ) + + +def test_delete_instance_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ImportDataRequest, + dict, + ], +) +def test_import_data_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_data(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_import_data_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_data] = mock_rpc + + request = {} + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_import_data_rest_required_fields(request_type=parallelstore.ImportDataRequest): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_data(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_data_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_data._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_data_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_import_data" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_import_data" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.ImportDataRequest.pb( + parallelstore.ImportDataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.ImportDataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_data( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_data_rest_bad_request( + transport: str = "rest", request_type=parallelstore.ImportDataRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_data(request) + + +def test_import_data_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + parallelstore.ExportDataRequest, + dict, + ], +) +def test_export_data_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_data(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_data_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_data in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_data] = mock_rpc + + request = {} + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_data(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_export_data_rest_required_fields(request_type=parallelstore.ExportDataRequest): + transport_class = transports.ParallelstoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_data._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_data(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_data_rest_unset_required_fields(): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_data._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_data_rest_interceptors(null_interceptor): + transport = transports.ParallelstoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ParallelstoreRestInterceptor(), + ) + client = ParallelstoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ParallelstoreRestInterceptor, "post_export_data" + ) as post, mock.patch.object( + transports.ParallelstoreRestInterceptor, "pre_export_data" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = parallelstore.ExportDataRequest.pb( + parallelstore.ExportDataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = parallelstore.ExportDataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_data( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_data_rest_bad_request( + transport: str = "rest", request_type=parallelstore.ExportDataRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_data(request) + + +def test_export_data_rest_error(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ParallelstoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ParallelstoreClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ParallelstoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ParallelstoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + transports.ParallelstoreRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
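+ # google.auth.default() implements Application Default Credentials (ADC); patching it
+ # lets the transport be constructed without real credentials on the test machine and
+ # records that ADC was consulted exactly once.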
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ParallelstoreClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ParallelstoreGrpcTransport, + ) + + +def test_parallelstore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ParallelstoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_parallelstore_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ParallelstoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "delete_instance", + "import_data", + "export_data", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_parallelstore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ParallelstoreTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_parallelstore_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
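+ # _prep_wrapped_messages is stubbed out so only the credential-resolution path
+ # runs; google.auth.default should be consulted exactly once.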
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.parallelstore_v1.services.parallelstore.transports.ParallelstoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ParallelstoreTransport() + adc.assert_called_once() + + +def test_parallelstore_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ParallelstoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + transports.ParallelstoreRestTransport, + ], +) +def test_parallelstore_transport_auth_gdch_credentials(transport_class): + host = "https://language.com/" + api_audience_tests = [None, "https://language2.com/"] + api_audience_expect = [host, "https://language2.com/"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ParallelstoreGrpcTransport, grpc_helpers), + (transports.ParallelstoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_parallelstore_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
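+ # In addition to the ADC lookup, the assertions below check that create_channel
+ # is called with the default endpoint, the requested scopes, and the unlimited
+ # gRPC message-size options.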
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "parallelstore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="parallelstore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_parallelstore_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ParallelstoreRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_parallelstore_rest_lro_client(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_parallelstore_host_no_port(transport_name): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="parallelstore.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "parallelstore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_parallelstore_host_with_port(transport_name): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="parallelstore.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "parallelstore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://parallelstore.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_parallelstore_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ParallelstoreClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ParallelstoreClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.import_data._session + session2 = client2.transport.import_data._session + assert session1 != session2 + session1 = client1.transport.export_data._session + session2 = client2.transport.export_data._session + assert session1 != session2 + + +def test_parallelstore_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ParallelstoreGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_parallelstore_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ParallelstoreGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ParallelstoreGrpcTransport, + transports.ParallelstoreGrpcAsyncIOTransport, + ], +) +def test_parallelstore_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_parallelstore_grpc_lro_client(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_parallelstore_grpc_lro_async_client(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_address_path(): + project = "squid" + region = "clam" + address = "whelk" + expected = "projects/{project}/regions/{region}/addresses/{address}".format( + project=project, + region=region, + address=address, + ) + actual = ParallelstoreClient.address_path(project, region, address) + assert expected == actual + + +def test_parse_address_path(): + expected = { + "project": "octopus", + "region": "oyster", + "address": "nudibranch", + } + path = ParallelstoreClient.address_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_address_path(path) + assert expected == actual + + +def test_instance_path(): + project = "cuttlefish" + location = "mussel" + instance = "winkle" + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) + actual = ParallelstoreClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "instance": "abalone", + } + path = ParallelstoreClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_instance_path(path) + assert expected == actual + + +def test_network_path(): + project = "squid" + network = "clam" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = ParallelstoreClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "whelk", + "network": "octopus", + } + path = ParallelstoreClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_network_path(path) + assert expected == actual + + +def test_service_account_path(): + project = "oyster" + service_account = "nudibranch" + expected = "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) + actual = ParallelstoreClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "cuttlefish", + "service_account": "mussel", + } + path = ParallelstoreClient.service_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParallelstoreClient.parse_service_account_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ParallelstoreClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = ParallelstoreClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ParallelstoreClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = ParallelstoreClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ParallelstoreClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = ParallelstoreClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = ParallelstoreClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = ParallelstoreClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ParallelstoreClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ParallelstoreClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = ParallelstoreClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParallelstoreClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ParallelstoreTransport, "_prep_wrapped_messages" + ) as prep: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ParallelstoreTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ParallelstoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
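+ # The name field below is what is expected to appear in the
+ # x-goog-request-params routing metadata asserted at the end of the test.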
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = ParallelstoreClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = ParallelstoreAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = ParallelstoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ParallelstoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ParallelstoreClient, transports.ParallelstoreGrpcTransport), + (ParallelstoreAsyncClient, transports.ParallelstoreGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 052585c63dfa172b7f88fdb5882eda446fc47bfe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:26:14 -0400 Subject: [PATCH 093/108] feat: [google-cloud-bigquery-datatransfer]Add scheduleOptionsV2 and Error fields for TransferConfig (#13116) BEGIN_COMMIT_OVERRIDE feat:Add scheduleOptionsV2 and Error fields for TransferConfig END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 680586383 Source-Link: https://github.com/googleapis/googleapis/commit/463b5a6b06e20504fb44bfedff59ba05b42bf0b2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/68d602fca86cfbf7653612f50c5cf9e3105065c9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGF0cmFuc2Zlci8uT3dsQm90LnlhbWwiLCJoIjoiNjhkNjAyZmNhODZjZmJmNzY1MzYxMmY1MGM1Y2Y5ZTMxMDUwNjVjOSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../cloud/bigquery_datatransfer/__init__.py | 8 + .../bigquery_datatransfer/gapic_version.py | 2 +- .../bigquery_datatransfer_v1/__init__.py | 8 + .../bigquery_datatransfer_v1/gapic_version.py | 2 +- .../types/__init__.py | 8 + .../types/transfer.py | 147 ++++++++++++++++++ ...google.cloud.bigquery.datatransfer.v1.json | 2 +- .../test_data_transfer_service.py | 43 +++++ 8 files changed, 217 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py index c2cce6e9d9e7..bb51c87c8254 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py @@ -54,7 +54,11 @@ from google.cloud.bigquery_datatransfer_v1.types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -93,7 +97,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git 
a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index fc64b41dd679..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py index 41ad09552699..7df301ab3c59 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -52,7 +52,11 @@ from .types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -74,6 +78,7 @@ "EmailPreferences", "EncryptionConfiguration", "EnrollDataSourcesRequest", + "EventDrivenSchedule", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -85,11 +90,14 @@ "ListTransferLogsResponse", "ListTransferRunsRequest", "ListTransferRunsResponse", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", "ScheduleTransferRunsRequest", "ScheduleTransferRunsResponse", "StartManualTransferRunsRequest", "StartManualTransferRunsResponse", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index fc64b41dd679..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.15.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py index 2caa0e24a50d..f704ac5f758d 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -43,7 +43,11 @@ from .transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -80,7 +84,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py index 4403154949f8..bd37dfdec84b 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -30,6 +30,10 @@ "TransferState", "EmailPreferences", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", + "ManualSchedule", + "EventDrivenSchedule", "UserInfo", "TransferConfig", "EncryptionConfiguration", @@ -144,6 +148,130 @@ class ScheduleOptions(proto.Message): ) +class ScheduleOptionsV2(proto.Message): + r"""V2 options customizing different types of data transfer + schedule. This field supports existing time-based and manual + transfer schedule. Also supports Event-Driven transfer schedule. + ScheduleOptionsV2 cannot be used together with + ScheduleOptions/Schedule. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + time_based_schedule (google.cloud.bigquery_datatransfer_v1.types.TimeBasedSchedule): + Time based transfer schedule options. This is + the default schedule option. + + This field is a member of `oneof`_ ``schedule``. + manual_schedule (google.cloud.bigquery_datatransfer_v1.types.ManualSchedule): + Manual transfer schedule. If set, the transfer run will not + be auto-scheduled by the system, unless the client invokes + StartManualTransferRuns. This is equivalent to + disable_auto_scheduling = true. + + This field is a member of `oneof`_ ``schedule``. + event_driven_schedule (google.cloud.bigquery_datatransfer_v1.types.EventDrivenSchedule): + Event driven transfer schedule options. If + set, the transfer will be scheduled upon events + arrial. + + This field is a member of `oneof`_ ``schedule``. 
+ """ + + time_based_schedule: "TimeBasedSchedule" = proto.Field( + proto.MESSAGE, + number=1, + oneof="schedule", + message="TimeBasedSchedule", + ) + manual_schedule: "ManualSchedule" = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message="ManualSchedule", + ) + event_driven_schedule: "EventDrivenSchedule" = proto.Field( + proto.MESSAGE, + number=3, + oneof="schedule", + message="EventDrivenSchedule", + ) + + +class TimeBasedSchedule(proto.Message): + r"""Options customizing the time based transfer schedule. + Options are migrated from the original ScheduleOptions message. + + Attributes: + schedule (str): + Data transfer schedule. If the data source does not support + a custom schedule, this should be empty. If it is empty, the + default value for the data source will be used. The + specified times are in UTC. Examples of valid format: + ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. See more explanation + about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + + NOTE: The minimum interval time between recurring transfers + depends on the data source; refer to the documentation for + your data source. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Specifies time to start scheduling transfer + runs. The first run will be scheduled at or + after the start time according to a recurrence + pattern defined in the schedule string. The + start time can be changed at any moment. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Defines time to stop scheduling transfer + runs. A transfer run cannot be scheduled at or + after the end time. The end time can be changed + at any moment. + """ + + schedule: str = proto.Field( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class ManualSchedule(proto.Message): + r"""Options customizing manual transfers schedule.""" + + +class EventDrivenSchedule(proto.Message): + r"""Options customizing EventDriven transfers schedule. + + Attributes: + pubsub_subscription (str): + Pub/Sub subscription name used to receive + events. Only Google Cloud Storage data source + support this option. Format: + projects/{project}/subscriptions/{subscription} + """ + + pubsub_subscription: str = proto.Field( + proto.STRING, + number=1, + ) + + class UserInfo(proto.Message): r"""Information about a user. @@ -222,6 +350,11 @@ class TransferConfig(proto.Message): schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): Options customizing the data transfer schedule. + schedule_options_v2 (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptionsV2): + Options customizing different types of data transfer + schedule. This field replaces "schedule" and + "schedule_options" fields. ScheduleOptionsV2 cannot be used + together with ScheduleOptions/Schedule. data_refresh_window_days (int): The number of days to look back to automatically refresh the data. For example, if ``data_refresh_window_days = 10``, @@ -274,6 +407,10 @@ class TransferConfig(proto.Message): effect. Write methods will apply the key if it is present, or otherwise try to apply project default keys if it is absent. + error (google.rpc.status_pb2.Status): + Output only. 
Error code with detailed + information about reason of the latest config + failure. """ name: str = proto.Field( @@ -307,6 +444,11 @@ class TransferConfig(proto.Message): number=24, message="ScheduleOptions", ) + schedule_options_v2: "ScheduleOptionsV2" = proto.Field( + proto.MESSAGE, + number=31, + message="ScheduleOptionsV2", + ) data_refresh_window_days: int = proto.Field( proto.INT32, number=12, @@ -358,6 +500,11 @@ class TransferConfig(proto.Message): number=28, message="EncryptionConfiguration", ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=32, + message=status_pb2.Status, + ) class EncryptionConfiguration(proto.Message): diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index da58d7e46817..adc8c281da8f 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.15.7" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 527a6e81160e..f144355cd636 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -35,6 +35,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format @@ -8880,6 +8881,17 @@ def test_create_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -8891,6 +8903,16 @@ def test_create_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
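The request dicts above exercise the new ``schedule_options_v2`` and ``error`` fields end to end. As a minimal sketch only (not part of the generated library), this is how client code might populate the new oneof when building a transfer config with the post-patch ``bigquery_datatransfer_v1`` package; the project and subscription names are placeholders, and ``google_cloud_storage`` is used because the event-driven option is documented as Cloud Storage only:

.. code-block:: python

    from google.cloud import bigquery_datatransfer_v1

    # Event-driven schedule: runs are triggered by Pub/Sub notifications for the
    # Cloud Storage data source instead of a cron-style schedule string.
    transfer_config = bigquery_datatransfer_v1.TransferConfig(
        display_name="gcs_to_bq_event_driven",
        data_source_id="google_cloud_storage",
        schedule_options_v2=bigquery_datatransfer_v1.ScheduleOptionsV2(
            event_driven_schedule=bigquery_datatransfer_v1.EventDrivenSchedule(
                pubsub_subscription="projects/my-project/subscriptions/my-subscription",
            ),
        ),
    )

    # On a TransferConfig returned by the service, the new output-only `error`
    # field is a google.rpc.Status; a non-zero code describes the latest config
    # failure. Shown on the locally built message only to illustrate field access.
    if transfer_config.error.code != 0:
        print(transfer_config.error.message)
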
# Delete any fields which are not present in the current runtime dependency @@ -9327,6 +9349,17 @@ def test_update_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -9338,6 +9371,16 @@ def test_update_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency From 8d79ca81a3f2f01a1f0c77231e77566860f1d4ab Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:52:13 -0400 Subject: [PATCH 094/108] feat: [google-shopping-merchant-accounts] add 'force' parameter for accounts.delete method (#13111) BEGIN_COMMIT_OVERRIDE feat: add 'force' parameter for accounts.delete method docs: updated descriptions for the DeleteAccount and ListAccounts RPCs fix!: The type of an existing field `time_zone` is changed from `message` to `string` in message `.google.shopping.merchant.accounts.v1beta.ListAccountIssuesRequest` fix!: An existing field `account_aggregation` is removed from message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `service` in message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `region_code` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` fix!: Changed field behavior for an existing field `kind` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` feat: A new field `account_aggregation` is added to message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` feat: A new message `AccountAggregation` is added feat: A new service `AutofeedSettingsService` is added feat: A new message `AutofeedSettings` is added feat: A new resource_definition `[merchantapi.googleapis.com/AutofeedSettings](https://www.google.com/url?sa=D&q=http%3A%2F%2Fmerchantapi.googleapis.com%2FAutofeedSettings)` is added feat: A new message `GetAutofeedSettingsRequest` is added feat: A new message `UpdateAutofeedSettingsRequest` is added feat: A new field `korean_business_registration_number` is added to message `.google.shopping.merchant.accounts.v1beta.BusinessInfo` END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
docs: updated descriptions for the DeleteAccount and ListAccounts RPCs PiperOrigin-RevId: 680468173 Source-Link: https://github.com/googleapis/googleapis/commit/1b2f804bf43253118ff0e56f9524979265afdfe6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/aaf00b070e000374e0f531319e4b7dd797de3165 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWFjY291bnRzLy5Pd2xCb3QueWFtbCIsImgiOiJhYWYwMGIwNzBlMDAwMzc0ZTBmNTMxMzE5ZTRiN2RkNzk3ZGUzMTY1In0= BEGIN_NESTED_COMMIT fix!: [google-shopping-merchant-accounts] The type of an existing field `time_zone` is changed from `message` to `string` in message `.google.shopping.merchant.accounts.v1beta.ListAccountIssuesRequest` fix!: An existing field `account_aggregation` is removed from message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `service` in message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` fix!: Changed field behavior for an existing field `region_code` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` fix!: Changed field behavior for an existing field `kind` in message `.google.shopping.merchant.accounts.v1beta.RetrieveLatestTermsOfServiceRequest` feat: A new field `account_aggregation` is added to message `.google.shopping.merchant.accounts.v1beta.CreateAndConfigureAccountRequest` feat: A new message `AccountAggregation` is added feat: A new service `AutofeedSettingsService` is added feat: A new message `AutofeedSettings` is added feat: A new resource_definition `merchantapi.googleapis.com/AutofeedSettings` is added feat: A new message `GetAutofeedSettingsRequest` is added feat: A new message `UpdateAutofeedSettingsRequest` is added feat: A new field `korean_business_registration_number` is added to message `.google.shopping.merchant.accounts.v1beta.BusinessInfo` PiperOrigin-RevId: 678841094 Source-Link: https://github.com/googleapis/googleapis/commit/005df4681b89bd204a90b76168a6dc9d9e7bf4fe Source-Link: https://github.com/googleapis/googleapis-gen/commit/1c58da100531d09e9123331d121f410e7d00e4aa Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWFjY291bnRzLy5Pd2xCb3QueWFtbCIsImgiOiIxYzU4ZGExMDA1MzFkMDllOTEyMzMzMWQxMjFmNDEwZTdkMDBlNGFhIn0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: ohmayr --- .../autofeed_settings_service.rst | 6 + .../merchant_accounts_v1beta/services_.rst | 1 + .../shopping/merchant_accounts/__init__.py | 20 + .../merchant_accounts_v1beta/__init__.py | 16 + .../gapic_metadata.json | 49 + .../services/accounts_service/async_client.py | 13 +- .../services/accounts_service/client.py | 13 +- .../accounts_service/transports/grpc.py | 13 +- .../transports/grpc_asyncio.py | 13 +- .../autofeed_settings_service/__init__.py | 22 + .../autofeed_settings_service/async_client.py | 519 +++ .../autofeed_settings_service/client.py | 931 +++++ .../transports/__init__.py | 41 + .../transports/base.py | 181 + .../transports/grpc.py | 304 ++ .../transports/grpc_asyncio.py | 320 ++ .../transports/rest.py | 454 +++ .../email_preferences_service/async_client.py | 4 +- .../email_preferences_service/client.py | 4 +- .../async_client.py | 6 +- .../online_return_policy_service/client.py | 6 +- .../shipping_settings_service/async_client.py | 4 +- .../shipping_settings_service/client.py | 4 +- .../transports/rest.py | 8 +- .../async_client.py | 2 + .../client.py | 2 + .../terms_of_service_service/async_client.py | 2 + 
.../terms_of_service_service/client.py | 2 + .../transports/rest.py | 14 + .../types/__init__.py | 10 + .../types/accountissue.py | 8 +- .../types/accounts.py | 44 +- .../types/accountservices.py | 34 + .../types/autofeedsettings.py | 109 + .../types/businessinfo.py | 11 + .../types/shippingsettings.py | 27 +- .../types/termsofservice.py | 6 +- .../types/termsofserviceagreementstate.py | 3 + ...ngs_service_get_autofeed_settings_async.py | 52 + ...ings_service_get_autofeed_settings_sync.py | 52 + ..._service_update_autofeed_settings_async.py | 55 + ...s_service_update_autofeed_settings_sync.py | 55 + ..._retrieve_latest_terms_of_service_async.py | 2 + ...e_retrieve_latest_terms_of_service_sync.py | 2 + ...gle.shopping.merchant.accounts.v1beta.json | 358 +- ...fixup_merchant_accounts_v1beta_keywords.py | 6 +- .../test_account_issue_service.py | 3 +- .../test_accounts_service.py | 22 +- .../test_autofeed_settings_service.py | 3474 +++++++++++++++++ .../test_business_info_service.py | 33 + .../test_terms_of_service_service.py | 114 + 51 files changed, 7363 insertions(+), 91 deletions(-) create mode 100644 packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py create mode 100644 packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py create mode 100644 packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py create mode 100644 
packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst new file mode 100644 index 000000000000..7d16dfb96a4b --- /dev/null +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/autofeed_settings_service.rst @@ -0,0 +1,6 @@ +AutofeedSettingsService +----------------------------------------- + +.. automodule:: google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst index 1c502de92ff3..b9c209efb0f7 100644 --- a/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst +++ b/packages/google-shopping-merchant-accounts/docs/merchant_accounts_v1beta/services_.rst @@ -6,6 +6,7 @@ Services for Google Shopping Merchant Accounts v1beta API account_issue_service accounts_service account_tax_service + autofeed_settings_service business_identity_service business_info_service email_preferences_service diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py index 97bb9de71cdf..2af1a6d2c96c 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts/__init__.py @@ -36,6 +36,12 @@ from google.shopping.merchant_accounts_v1beta.services.accounts_service.client import ( AccountsServiceClient, ) +from google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.async_client import ( + AutofeedSettingsServiceAsyncClient, +) +from google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.client import ( + AutofeedSettingsServiceClient, +) from google.shopping.merchant_accounts_v1beta.services.business_identity_service.async_client import ( BusinessIdentityServiceAsyncClient, ) @@ -126,6 +132,14 @@ ListSubAccountsResponse, UpdateAccountRequest, ) +from google.shopping.merchant_accounts_v1beta.types.accountservices import ( + AccountAggregation, +) +from google.shopping.merchant_accounts_v1beta.types.autofeedsettings import ( + AutofeedSettings, + GetAutofeedSettingsRequest, + UpdateAutofeedSettingsRequest, +) from google.shopping.merchant_accounts_v1beta.types.businessidentity import ( BusinessIdentity, GetBusinessIdentityRequest, @@ -234,6 +248,8 @@ "AccountsServiceAsyncClient", "AccountTaxServiceClient", "AccountTaxServiceAsyncClient", + "AutofeedSettingsServiceClient", + "AutofeedSettingsServiceAsyncClient", "BusinessIdentityServiceClient", "BusinessIdentityServiceAsyncClient", "BusinessInfoServiceClient", @@ -274,6 +290,10 @@ "ListSubAccountsRequest", "ListSubAccountsResponse", "UpdateAccountRequest", + "AccountAggregation", + "AutofeedSettings", + "GetAutofeedSettingsRequest", + "UpdateAutofeedSettingsRequest", "BusinessIdentity", "GetBusinessIdentityRequest", "UpdateBusinessIdentityRequest", diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py 
b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py index 645cb37080cd..fd3bbce8a284 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/__init__.py @@ -27,6 +27,10 @@ AccountTaxServiceClient, ) from .services.accounts_service import AccountsServiceAsyncClient, AccountsServiceClient +from .services.autofeed_settings_service import ( + AutofeedSettingsServiceAsyncClient, + AutofeedSettingsServiceClient, +) from .services.business_identity_service import ( BusinessIdentityServiceAsyncClient, BusinessIdentityServiceClient, @@ -83,6 +87,12 @@ ListSubAccountsResponse, UpdateAccountRequest, ) +from .types.accountservices import AccountAggregation +from .types.autofeedsettings import ( + AutofeedSettings, + GetAutofeedSettingsRequest, + UpdateAutofeedSettingsRequest, +) from .types.businessidentity import ( BusinessIdentity, GetBusinessIdentityRequest, @@ -182,6 +192,7 @@ "AccountIssueServiceAsyncClient", "AccountTaxServiceAsyncClient", "AccountsServiceAsyncClient", + "AutofeedSettingsServiceAsyncClient", "BusinessIdentityServiceAsyncClient", "BusinessInfoServiceAsyncClient", "EmailPreferencesServiceAsyncClient", @@ -197,12 +208,15 @@ "Accepted", "AccessRight", "Account", + "AccountAggregation", "AccountIssue", "AccountIssueServiceClient", "AccountTax", "AccountTaxServiceClient", "AccountsServiceClient", "Address", + "AutofeedSettings", + "AutofeedSettingsServiceClient", "BusinessDayConfig", "BusinessIdentity", "BusinessIdentityServiceClient", @@ -226,6 +240,7 @@ "EnableProgramRequest", "GetAccountRequest", "GetAccountTaxRequest", + "GetAutofeedSettingsRequest", "GetBusinessIdentityRequest", "GetBusinessInfoRequest", "GetEmailPreferencesRequest", @@ -285,6 +300,7 @@ "UnclaimHomepageRequest", "UpdateAccountRequest", "UpdateAccountTaxRequest", + "UpdateAutofeedSettingsRequest", "UpdateBusinessIdentityRequest", "UpdateBusinessInfoRequest", "UpdateEmailPreferencesRequest", diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json index 3823aac9f4fa..2e91ad70f4fb 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/gapic_metadata.json @@ -212,6 +212,55 @@ } } }, + "AutofeedSettingsService": { + "clients": { + "grpc": { + "libraryClient": "AutofeedSettingsServiceClient", + "rpcs": { + "GetAutofeedSettings": { + "methods": [ + "get_autofeed_settings" + ] + }, + "UpdateAutofeedSettings": { + "methods": [ + "update_autofeed_settings" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AutofeedSettingsServiceAsyncClient", + "rpcs": { + "GetAutofeedSettings": { + "methods": [ + "get_autofeed_settings" + ] + }, + "UpdateAutofeedSettings": { + "methods": [ + "update_autofeed_settings" + ] + } + } + }, + "rest": { + "libraryClient": "AutofeedSettingsServiceClient", + "rpcs": { + "GetAutofeedSettings": { + "methods": [ + "get_autofeed_settings" + ] + }, + "UpdateAutofeedSettings": { + "methods": [ + "update_autofeed_settings" + ] + } + } + } + } + }, "BusinessIdentityService": { "clients": { "grpc": { diff --git 
a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py index 2b9a05d02cc8..7a39ba6829d3 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/async_client.py @@ -465,9 +465,12 @@ async def delete_account( metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. .. code-block:: python @@ -685,7 +688,9 @@ async def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. .. code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py index b5bb3423a3df..12d7a2732205 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/client.py @@ -904,9 +904,12 @@ def delete_account( metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. .. code-block:: python @@ -1118,7 +1121,9 @@ def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. .. 
code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py index 9ddc7ec8854e..c2c0d8c4abc7 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc.py @@ -297,9 +297,12 @@ def delete_account( r"""Return a callable for the delete account method over gRPC. Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. Returns: Callable[[~.DeleteAccountRequest], @@ -358,7 +361,9 @@ def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. Returns: Callable[[~.ListAccountsRequest], diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py index 391cc69c72fe..a90a29d80462 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/accounts_service/transports/grpc_asyncio.py @@ -305,9 +305,12 @@ def delete_account( r"""Return a callable for the delete account method over gRPC. Deletes the specified account regardless of its type: - standalone, MCA or sub-account. Deleting an MCA leads to - the deletion of all of its sub-accounts. Executing this - method requires admin access. + standalone, MCA or sub-account. Deleting an MCA leads to the + deletion of all of its sub-accounts. Executing this method + requires admin access. The deletion succeeds only if the account + does not provide services to any other account and has no + processed offers. You can use the ``force`` parameter to + override this. Returns: Callable[[~.DeleteAccountRequest], @@ -368,7 +371,9 @@ def list_accounts( size or filters. This is not just listing the sub-accounts of an MCA, but all accounts the calling user has access to including other MCAs, linked - accounts, standalone accounts and so on. + accounts, standalone accounts and so on. If no filter is + provided, then it returns accounts the user is directly + added to. 
Returns: Callable[[~.ListAccountsRequest], diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py new file mode 100644 index 000000000000..b4e4ddd5e568 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AutofeedSettingsServiceAsyncClient +from .client import AutofeedSettingsServiceClient + +__all__ = ( + "AutofeedSettingsServiceClient", + "AutofeedSettingsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py new file mode 100644 index 000000000000..e01855825f41 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/async_client.py @@ -0,0 +1,519 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .client import AutofeedSettingsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport +from .transports.grpc_asyncio import AutofeedSettingsServiceGrpcAsyncIOTransport + + +class AutofeedSettingsServiceAsyncClient: + """Service to support + `autofeed `__ + setting. + """ + + _client: AutofeedSettingsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = AutofeedSettingsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + + autofeed_settings_path = staticmethod( + AutofeedSettingsServiceClient.autofeed_settings_path + ) + parse_autofeed_settings_path = staticmethod( + AutofeedSettingsServiceClient.parse_autofeed_settings_path + ) + common_billing_account_path = staticmethod( + AutofeedSettingsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AutofeedSettingsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AutofeedSettingsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + AutofeedSettingsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + AutofeedSettingsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + AutofeedSettingsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceAsyncClient: The constructed client. 
+ """ + return AutofeedSettingsServiceClient.from_service_account_info.__func__(AutofeedSettingsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceAsyncClient: The constructed client. + """ + return AutofeedSettingsServiceClient.from_service_account_file.__func__(AutofeedSettingsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AutofeedSettingsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AutofeedSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AutofeedSettingsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = AutofeedSettingsServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + AutofeedSettingsServiceTransport, + Callable[..., AutofeedSettingsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the autofeed settings service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AutofeedSettingsServiceTransport,Callable[..., AutofeedSettingsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AutofeedSettingsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = AutofeedSettingsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_autofeed_settings( + self, + request: Optional[ + Union[autofeedsettings.GetAutofeedSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Retrieves the autofeed settings of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_get_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_autofeed_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest, dict]]): + The request object. Request message for the ``GetAutofeedSettings`` method. + name (:class:`str`): + Required. The resource name of the autofeed settings. + Format: ``accounts/{account}/autofeedSettings`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AutofeedSettings: + Collection of information related to the + [autofeed](https://support.google.com/merchants/answer/7538732) + settings. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, autofeedsettings.GetAutofeedSettingsRequest): + request = autofeedsettings.GetAutofeedSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_autofeed_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_autofeed_settings( + self, + request: Optional[ + Union[autofeedsettings.UpdateAutofeedSettingsRequest, dict] + ] = None, + *, + autofeed_settings: Optional[autofeedsettings.AutofeedSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Updates the autofeed settings of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + async def sample_update_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + autofeed_settings = merchant_accounts_v1beta.AutofeedSettings() + autofeed_settings.enable_products = True + + request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest( + autofeed_settings=autofeed_settings, + ) + + # Make the request + response = await client.update_autofeed_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest, dict]]): + The request object. Request message for the ``UpdateAutofeedSettings`` + method. + autofeed_settings (:class:`google.shopping.merchant_accounts_v1beta.types.AutofeedSettings`): + Required. The new version of the + autofeed setting. + + This corresponds to the ``autofeed_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. List of fields being + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AutofeedSettings: + Collection of information related to the + [autofeed](https://support.google.com/merchants/answer/7538732) + settings. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([autofeed_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, autofeedsettings.UpdateAutofeedSettingsRequest): + request = autofeedsettings.UpdateAutofeedSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if autofeed_settings is not None: + request.autofeed_settings = autofeed_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_autofeed_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("autofeed_settings.name", request.autofeed_settings.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "AutofeedSettingsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AutofeedSettingsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py new file mode 100644 index 000000000000..047661f9ebf1 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/client.py @@ -0,0 +1,931 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+ Callable,
+ Dict,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.shopping.merchant_accounts_v1beta import gapic_version as package_version
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+
+from google.protobuf import field_mask_pb2 # type: ignore
+
+from google.shopping.merchant_accounts_v1beta.types import autofeedsettings
+
+from .transports.base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport
+from .transports.grpc import AutofeedSettingsServiceGrpcTransport
+from .transports.grpc_asyncio import AutofeedSettingsServiceGrpcAsyncIOTransport
+from .transports.rest import AutofeedSettingsServiceRestTransport
+
+
+class AutofeedSettingsServiceClientMeta(type):
+ """Metaclass for the AutofeedSettingsService client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[AutofeedSettingsServiceTransport]]
+ _transport_registry["grpc"] = AutofeedSettingsServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = AutofeedSettingsServiceGrpcAsyncIOTransport
+ _transport_registry["rest"] = AutofeedSettingsServiceRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[AutofeedSettingsServiceTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class AutofeedSettingsServiceClient(metaclass=AutofeedSettingsServiceClientMeta):
+ """Service to support
+ `autofeed <https://support.google.com/merchants/answer/7538732>`__
+ setting.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutofeedSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AutofeedSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AutofeedSettingsServiceTransport: The transport used by the client + instance. 
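+ (If the client was constructed without an explicit ``transport``
+ argument, this is the default transport, i.e. the first entry in
+ the transport registry above, which is gRPC.)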
+ """
+ return self._transport
+
+ @staticmethod
+ def autofeed_settings_path(
+ account: str,
+ ) -> str:
+ """Returns a fully-qualified autofeed_settings string."""
+ return "accounts/{account}/autofeedSettings".format(
+ account=account,
+ )
+
+ @staticmethod
+ def parse_autofeed_settings_path(path: str) -> Dict[str, str]:
+ """Parses an autofeed_settings path into its component segments."""
+ m = re.match(r"^accounts/(?P<account>.+?)/autofeedSettings$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[client_options_lib.ClientOptions] = None
+ ):
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
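+ (The provided certificate source is only honored when the
+ GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true".)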
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
+ """ + + default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or AutofeedSettingsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + AutofeedSettingsServiceTransport, + Callable[..., AutofeedSettingsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the autofeed settings service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AutofeedSettingsServiceTransport,Callable[..., AutofeedSettingsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AutofeedSettingsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = AutofeedSettingsServiceClient._read_environment_variables() + self._client_cert_source = ( + AutofeedSettingsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = AutofeedSettingsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, AutofeedSettingsServiceTransport) + if transport_provided: + # transport is a AutofeedSettingsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(AutofeedSettingsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or AutofeedSettingsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[AutofeedSettingsServiceTransport], + Callable[..., AutofeedSettingsServiceTransport], + ] = ( + AutofeedSettingsServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AutofeedSettingsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_autofeed_settings( + self, + request: Optional[ + Union[autofeedsettings.GetAutofeedSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Retrieves the autofeed settings of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_get_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_autofeed_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest, dict]): + The request object. Request message for the ``GetAutofeedSettings`` method. + name (str): + Required. The resource name of the autofeed settings. + Format: ``accounts/{account}/autofeedSettings`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AutofeedSettings: + Collection of information related to the + [autofeed](https://support.google.com/merchants/answer/7538732) + settings. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, autofeedsettings.GetAutofeedSettingsRequest): + request = autofeedsettings.GetAutofeedSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_autofeed_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_autofeed_settings( + self, + request: Optional[ + Union[autofeedsettings.UpdateAutofeedSettingsRequest, dict] + ] = None, + *, + autofeed_settings: Optional[autofeedsettings.AutofeedSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Updates the autofeed settings of an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_accounts_v1beta + + def sample_update_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceClient() + + # Initialize request argument(s) + autofeed_settings = merchant_accounts_v1beta.AutofeedSettings() + autofeed_settings.enable_products = True + + request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest( + autofeed_settings=autofeed_settings, + ) + + # Make the request + response = client.update_autofeed_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest, dict]): + The request object. Request message for the ``UpdateAutofeedSettings`` + method. + autofeed_settings (google.shopping.merchant_accounts_v1beta.types.AutofeedSettings): + Required. The new version of the + autofeed setting. + + This corresponds to the ``autofeed_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being + updated. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_accounts_v1beta.types.AutofeedSettings: + Collection of information related to the + [autofeed](https://support.google.com/merchants/answer/7538732) + settings. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([autofeed_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, autofeedsettings.UpdateAutofeedSettingsRequest): + request = autofeedsettings.UpdateAutofeedSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if autofeed_settings is not None: + request.autofeed_settings = autofeed_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_autofeed_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("autofeed_settings.name", request.autofeed_settings.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AutofeedSettingsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AutofeedSettingsServiceClient",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py new file mode 100644 index 000000000000..e9bea840fd96 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AutofeedSettingsServiceTransport +from .grpc import AutofeedSettingsServiceGrpcTransport +from .grpc_asyncio import AutofeedSettingsServiceGrpcAsyncIOTransport +from .rest import ( + AutofeedSettingsServiceRestInterceptor, + AutofeedSettingsServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[AutofeedSettingsServiceTransport]] +_transport_registry["grpc"] = AutofeedSettingsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AutofeedSettingsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AutofeedSettingsServiceRestTransport + +__all__ = ( + "AutofeedSettingsServiceTransport", + "AutofeedSettingsServiceGrpcTransport", + "AutofeedSettingsServiceGrpcAsyncIOTransport", + "AutofeedSettingsServiceRestTransport", + "AutofeedSettingsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py new file mode 100644 index 000000000000..edee0c38537d --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/base.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_accounts_v1beta import gapic_version as package_version +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AutofeedSettingsServiceTransport(abc.ABC): + """Abstract transport class for AutofeedSettingsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
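+ # (`with_gdch_audience` typically exists only on Google Distributed Cloud
+ # Hosted credentials; when present, the audience falls back to the API
+ # host unless an explicit `api_audience` was supplied.)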
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_autofeed_settings: gapic_v1.method.wrap_method( + self.get_autofeed_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_autofeed_settings: gapic_v1.method.wrap_method( + self.update_autofeed_settings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.GetAutofeedSettingsRequest], + Union[ + autofeedsettings.AutofeedSettings, + Awaitable[autofeedsettings.AutofeedSettings], + ], + ]: + raise NotImplementedError() + + @property + def update_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.UpdateAutofeedSettingsRequest], + Union[ + autofeedsettings.AutofeedSettings, + Awaitable[autofeedsettings.AutofeedSettings], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AutofeedSettingsServiceTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py new file mode 100644 index 000000000000..dbe95388a05c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport + + +class AutofeedSettingsServiceGrpcTransport(AutofeedSettingsServiceTransport): + """gRPC backend transport for AutofeedSettingsService. + + Service to support + `autofeed `__ + setting. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. 
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.GetAutofeedSettingsRequest], autofeedsettings.AutofeedSettings + ]: + r"""Return a callable for the get autofeed settings method over gRPC. + + Retrieves the autofeed settings of an account. + + Returns: + Callable[[~.GetAutofeedSettingsRequest], + ~.AutofeedSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_autofeed_settings" not in self._stubs: + self._stubs["get_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/GetAutofeedSettings", + request_serializer=autofeedsettings.GetAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["get_autofeed_settings"] + + @property + def update_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.UpdateAutofeedSettingsRequest], + autofeedsettings.AutofeedSettings, + ]: + r"""Return a callable for the update autofeed settings method over gRPC. + + Updates the autofeed settings of an account. + + Returns: + Callable[[~.UpdateAutofeedSettingsRequest], + ~.AutofeedSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_autofeed_settings" not in self._stubs: + self._stubs["update_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/UpdateAutofeedSettings", + request_serializer=autofeedsettings.UpdateAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["update_autofeed_settings"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AutofeedSettingsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..99364a401b73 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/grpc_asyncio.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .base import DEFAULT_CLIENT_INFO, AutofeedSettingsServiceTransport +from .grpc import AutofeedSettingsServiceGrpcTransport + + +class AutofeedSettingsServiceGrpcAsyncIOTransport(AutofeedSettingsServiceTransport): + """gRPC AsyncIO backend transport for AutofeedSettingsService. + + Service to support + `autofeed `__ + setting. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. 
It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.GetAutofeedSettingsRequest], + Awaitable[autofeedsettings.AutofeedSettings], + ]: + r"""Return a callable for the get autofeed settings method over gRPC. + + Retrieves the autofeed settings of an account. + + Returns: + Callable[[~.GetAutofeedSettingsRequest], + Awaitable[~.AutofeedSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_autofeed_settings" not in self._stubs: + self._stubs["get_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/GetAutofeedSettings", + request_serializer=autofeedsettings.GetAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["get_autofeed_settings"] + + @property + def update_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.UpdateAutofeedSettingsRequest], + Awaitable[autofeedsettings.AutofeedSettings], + ]: + r"""Return a callable for the update autofeed settings method over gRPC. + + Updates the autofeed settings of an account. + + Returns: + Callable[[~.UpdateAutofeedSettingsRequest], + Awaitable[~.AutofeedSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_autofeed_settings" not in self._stubs: + self._stubs["update_autofeed_settings"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.accounts.v1beta.AutofeedSettingsService/UpdateAutofeedSettings", + request_serializer=autofeedsettings.UpdateAutofeedSettingsRequest.serialize, + response_deserializer=autofeedsettings.AutofeedSettings.deserialize, + ) + return self._stubs["update_autofeed_settings"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_autofeed_settings: gapic_v1.method_async.wrap_method( + self.get_autofeed_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_autofeed_settings: gapic_v1.method_async.wrap_method( + self.update_autofeed_settings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AutofeedSettingsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py new file mode 100644 index 000000000000..57aff5878d5f --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/autofeed_settings_service/transports/rest.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + +from .base import AutofeedSettingsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AutofeedSettingsServiceRestInterceptor: + """Interceptor for AutofeedSettingsService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AutofeedSettingsServiceRestTransport. + + .. code-block:: python + class MyCustomAutofeedSettingsServiceInterceptor(AutofeedSettingsServiceRestInterceptor): + def pre_get_autofeed_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_autofeed_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_autofeed_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_autofeed_settings(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AutofeedSettingsServiceRestTransport(interceptor=MyCustomAutofeedSettingsServiceInterceptor()) + client = AutofeedSettingsServiceClient(transport=transport) + + + """ + + def pre_get_autofeed_settings( + self, + request: autofeedsettings.GetAutofeedSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[autofeedsettings.GetAutofeedSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_autofeed_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutofeedSettingsService server. + """ + return request, metadata + + def post_get_autofeed_settings( + self, response: autofeedsettings.AutofeedSettings + ) -> autofeedsettings.AutofeedSettings: + """Post-rpc interceptor for get_autofeed_settings + + Override in a subclass to manipulate the response + after it is returned by the AutofeedSettingsService server but before + it is returned to user code. + """ + return response + + def pre_update_autofeed_settings( + self, + request: autofeedsettings.UpdateAutofeedSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + autofeedsettings.UpdateAutofeedSettingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_autofeed_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutofeedSettingsService server. + """ + return request, metadata + + def post_update_autofeed_settings( + self, response: autofeedsettings.AutofeedSettings + ) -> autofeedsettings.AutofeedSettings: + """Post-rpc interceptor for update_autofeed_settings + + Override in a subclass to manipulate the response + after it is returned by the AutofeedSettingsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AutofeedSettingsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AutofeedSettingsServiceRestInterceptor + + +class AutofeedSettingsServiceRestTransport(AutofeedSettingsServiceTransport): + """REST backend transport for AutofeedSettingsService. + + Service to support + `autofeed `__ + setting. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AutofeedSettingsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AutofeedSettingsServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _GetAutofeedSettings(AutofeedSettingsServiceRestStub):
+        def __hash__(self):
+            return hash("GetAutofeedSettings")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: autofeedsettings.GetAutofeedSettingsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> autofeedsettings.AutofeedSettings:
+            r"""Call the get autofeed settings method over HTTP.
+
+            Args:
+                request (~.autofeedsettings.GetAutofeedSettingsRequest):
+                    The request object. Request message for the ``GetAutofeedSettings`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.autofeedsettings.AutofeedSettings:
+                    Collection of information related to the
+                    `autofeed `__
+                    settings.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/accounts/v1beta/{name=accounts/*/autofeedSettings}",
+                },
+            ]
+            request, metadata = self._interceptor.pre_get_autofeed_settings(
+                request, metadata
+            )
+            pb_request = autofeedsettings.GetAutofeedSettingsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autofeedsettings.AutofeedSettings() + pb_resp = autofeedsettings.AutofeedSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_autofeed_settings(resp) + return resp + + class _UpdateAutofeedSettings(AutofeedSettingsServiceRestStub): + def __hash__(self): + return hash("UpdateAutofeedSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: autofeedsettings.UpdateAutofeedSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autofeedsettings.AutofeedSettings: + r"""Call the update autofeed settings method over HTTP. + + Args: + request (~.autofeedsettings.UpdateAutofeedSettingsRequest): + The request object. Request message for the ``UpdateAutofeedSettings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autofeedsettings.AutofeedSettings: + Collection of information related to the + `autofeed `__ + settings. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/accounts/v1beta/{autofeed_settings.name=accounts/*/autofeedSettings}", + "body": "autofeed_settings", + }, + ] + request, metadata = self._interceptor.pre_update_autofeed_settings( + request, metadata + ) + pb_request = autofeedsettings.UpdateAutofeedSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autofeedsettings.AutofeedSettings() + pb_resp = autofeedsettings.AutofeedSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_autofeed_settings(resp) + return resp + + @property + def get_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.GetAutofeedSettingsRequest], autofeedsettings.AutofeedSettings + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAutofeedSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_autofeed_settings( + self, + ) -> Callable[ + [autofeedsettings.UpdateAutofeedSettingsRequest], + autofeedsettings.AutofeedSettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAutofeedSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AutofeedSettingsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py index e01a3b2bcc9a..570799bb8e27 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/async_client.py @@ -287,7 +287,7 @@ async def get_email_preferences( r"""Returns the email preferences for a Merchant Center account user. - Use the name=accounts/*/users/me/emailPreferences alias to get + Use the ``name=accounts/*/users/me/emailPreferences`` alias to get preferences for the authenticated user. .. code-block:: python @@ -411,7 +411,7 @@ async def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the name=accounts/*/users/me/emailPreferences alias to + Use the ``name=accounts/*/users/me/emailPreferences`` alias to update preferences for the authenticated user. .. code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py index e2d8285b4900..d1a194f82f4b 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/email_preferences_service/client.py @@ -702,7 +702,7 @@ def get_email_preferences( r"""Returns the email preferences for a Merchant Center account user. - Use the name=accounts/*/users/me/emailPreferences alias to get + Use the ``name=accounts/*/users/me/emailPreferences`` alias to get preferences for the authenticated user. .. 
code-block:: python @@ -823,7 +823,7 @@ def update_email_preferences( It is invalid for updates to specify an UNCONFIRMED opt-in status value. - Use the name=accounts/*/users/me/emailPreferences alias to + Use the ``name=accounts/*/users/me/emailPreferences`` alias to update preferences for the authenticated user. .. code-block:: python diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py index 14f3f87a6b36..edfee0c82f58 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/async_client.py @@ -55,10 +55,8 @@ class OnlineReturnPolicyServiceAsyncClient: """The service facilitates the management of a merchant's remorse return policy configuration, encompassing return policies for both - ads and free listings - - programs. This API defines the following resource model: - -------------------------------------------------------- + ads and free listings programs. This API defines the following resource model: + ----------------------------------------------------------------------------- [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] """ diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py index 5159022c0212..690860b09d70 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/online_return_policy_service/client.py @@ -99,10 +99,8 @@ def get_transport_class( class OnlineReturnPolicyServiceClient(metaclass=OnlineReturnPolicyServiceClientMeta): """The service facilitates the management of a merchant's remorse return policy configuration, encompassing return policies for both - ads and free listings - - programs. This API defines the following resource model: - -------------------------------------------------------- + ads and free listings programs. 
This API defines the following resource model: + ----------------------------------------------------------------------------- [OnlineReturnPolicy][google.shopping.merchant.accounts.v1.OnlineReturnPolicy] """ diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py index aab6ce594de6..6565278a30e3 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/async_client.py @@ -327,7 +327,7 @@ async def sample_get_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. @@ -433,7 +433,7 @@ async def sample_insert_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py index a54e3edc102b..131a35b84f53 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/client.py @@ -738,7 +738,7 @@ def sample_get_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. @@ -841,7 +841,7 @@ def sample_insert_shipping_settings(): Returns: google.shopping.merchant_accounts_v1beta.types.ShippingSettings: The merchant account's [shipping - setting]((\ https://support.google.com/merchants/answer/6069284). + setting](\ https://support.google.com/merchants/answer/6069284). """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py index f93d7be50bf0..7e2a8e043d3a 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/shipping_settings_service/transports/rest.py @@ -266,8 +266,8 @@ def __call__( Returns: ~.shippingsettings.ShippingSettings: - The merchant account's [shipping - setting]((https://support.google.com/merchants/answer/6069284). + The merchant account's `shipping + setting `__. """ @@ -356,8 +356,8 @@ def __call__( Returns: ~.shippingsettings.ShippingSettings: - The merchant account's [shipping - setting]((https://support.google.com/merchants/answer/6069284). + The merchant account's `shipping + setting `__. """ diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py index c32d92d51405..128ef18fec98 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/async_client.py @@ -339,6 +339,8 @@ async def sample_get_terms_of_service_agreement_state(): Required. The resource name of the terms of service version. Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: + ``{TermsOfServiceKind}-{country}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py index 98cffc24a88e..bcce71b62393 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_agreement_state_service/client.py @@ -786,6 +786,8 @@ def sample_get_terms_of_service_agreement_state(): Required. The resource name of the terms of service version. 
Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: + ``{TermsOfServiceKind}-{country}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py index 841d23664e98..601dd62bcbee 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/async_client.py @@ -402,6 +402,8 @@ async def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py index 8b47907a1926..ccf312d7ddc9 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/client.py @@ -825,6 +825,8 @@ def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py index c2a44909985b..d6840f71ab24 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/services/terms_of_service_service/transports/rest.py @@ -417,6 +417,19 @@ class _RetrieveLatestTermsOfService(TermsOfServiceServiceRestStub): def __hash__(self): return hash("RetrieveLatestTermsOfService") + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "regionCode": "", + "kind": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + def __call__( self, request: termsofservice.RetrieveLatestTermsOfServiceRequest, @@ -465,6 +478,7 @@ def __call__( use_integers_for_enums=True, ) ) + query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py index fdd477be2f6a..90a54b07fd70 100644 --- 
a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/__init__.py @@ -37,6 +37,12 @@ ListSubAccountsResponse, UpdateAccountRequest, ) +from .accountservices import AccountAggregation +from .autofeedsettings import ( + AutofeedSettings, + GetAutofeedSettingsRequest, + UpdateAutofeedSettingsRequest, +) from .businessidentity import ( BusinessIdentity, GetBusinessIdentityRequest, @@ -151,6 +157,10 @@ "ListSubAccountsRequest", "ListSubAccountsResponse", "UpdateAccountRequest", + "AccountAggregation", + "AutofeedSettings", + "GetAutofeedSettingsRequest", + "UpdateAutofeedSettingsRequest", "BusinessIdentity", "GetBusinessIdentityRequest", "UpdateBusinessIdentityRequest", diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py index 91e837cf849b..4cf1f6bc9f24 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountissue.py @@ -18,7 +18,6 @@ from typing import MutableMapping, MutableSequence from google.shopping.type.types import types -from google.type import datetime_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -179,7 +178,7 @@ class ListAccountIssuesRequest(proto.Message): `BCP-47 `__, such as ``en-US`` or ``sr-Latn``. If not value is provided, ``en-US`` will be used. - time_zone (google.type.datetime_pb2.TimeZone): + time_zone (str): Optional. The `IANA `__ timezone used to localize times in human-readable fields. For example 'America/Los_Angeles'. If not set, @@ -202,10 +201,9 @@ class ListAccountIssuesRequest(proto.Message): proto.STRING, number=4, ) - time_zone: datetime_pb2.TimeZone = proto.Field( - proto.MESSAGE, + time_zone: str = proto.Field( + proto.STRING, number=5, - message=datetime_pb2.TimeZone, ) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py index a7b454e96fa1..fcbf154b7b41 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accounts.py @@ -17,12 +17,11 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.type import datetime_pb2 # type: ignore import proto # type: ignore -from google.shopping.merchant_accounts_v1beta.types import user +from google.shopping.merchant_accounts_v1beta.types import accountservices, user __protobuf__ = proto.module( package="google.shopping.merchant.accounts.v1beta", @@ -136,10 +135,14 @@ class CreateAndConfigureAccountRequest(proto.Message): This field is a member of `oneof`_ ``_accept_terms_of_service``. service (MutableSequence[google.shopping.merchant_accounts_v1beta.types.CreateAndConfigureAccountRequest.AddAccountService]): - Optional. If specified, an account service - between the account to be created and the - provider account is initialized as part of the - creation. + Required. 
An account service between the account to be + created and the provider account is initialized as part of + the creation. At least one such service needs to be + provided. Currently exactly one of these needs to be + ``account_aggregation``, which means you can only create sub + accounts, not standalone account through this method. + Additional ``account_management`` or ``product_management`` + services may be provided. """ class AcceptTermsOfService(proto.Message): @@ -147,8 +150,11 @@ class AcceptTermsOfService(proto.Message): Attributes: name (str): - Required. The resource name of the terms of - service version. + Required. The resource name of the terms of service version + in the format ``termsOfService/{version}``. To retrieve the + latest version, use the + `termsOfService.retrieveLatest `__ + method. region_code (str): Required. Region code as defined by `CLDR `__. This is either a @@ -173,9 +179,11 @@ class AddAccountService(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - account_aggregation (google.protobuf.empty_pb2.Empty): - The provider is an aggregator for the - account. + account_aggregation (google.shopping.merchant_accounts_v1beta.types.AccountAggregation): + The provider is an + `aggregator `__ + for the account. Payload for service type Account + Aggregation. This field is a member of `oneof`_ ``service_type``. provider (str): @@ -185,11 +193,11 @@ class AddAccountService(proto.Message): This field is a member of `oneof`_ ``_provider``. """ - account_aggregation: empty_pb2.Empty = proto.Field( + account_aggregation: accountservices.AccountAggregation = proto.Field( proto.MESSAGE, - number=2, + number=103, oneof="service_type", - message=empty_pb2.Empty, + message=accountservices.AccountAggregation, ) provider: str = proto.Field( proto.STRING, @@ -227,12 +235,20 @@ class DeleteAccountRequest(proto.Message): name (str): Required. The name of the account to delete. Format: ``accounts/{account}`` + force (bool): + Optional. If set to ``true``, the account is deleted even if + it provides services to other accounts or has processed + offers. """ name: str = proto.Field( proto.STRING, number=1, ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) class UpdateAccountRequest(proto.Message): diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py new file mode 100644 index 000000000000..3862f71dfbb4 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/accountservices.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "AccountAggregation", + }, +) + + +class AccountAggregation(proto.Message): + r"""``AccountAggregation`` payload.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py new file mode 100644 index 000000000000..475d7f62b5ee --- /dev/null +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/autofeedsettings.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.accounts.v1beta", + manifest={ + "AutofeedSettings", + "GetAutofeedSettingsRequest", + "UpdateAutofeedSettingsRequest", + }, +) + + +class AutofeedSettings(proto.Message): + r"""Collection of information related to the + `autofeed `__ + settings. + + Attributes: + name (str): + Identifier. The resource name of the autofeed settings. + Format: ``accounts/{account}/autofeedSettings``. + enable_products (bool): + Required. Enables or disables product crawling through the + autofeed for the given account. Autofeed accounts must meet + `certain + conditions `__, + which can be checked through the ``eligible`` field. The + account must **not** be a marketplace. When the autofeed is + enabled for the first time, the products usually appear + instantly. When re-enabling, it might take up to 24 hours + for products to appear. + eligible (bool): + Output only. Determines whether merchant is + eligible for being enrolled into an autofeed. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + enable_products: bool = proto.Field( + proto.BOOL, + number=2, + ) + eligible: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetAutofeedSettingsRequest(proto.Message): + r"""Request message for the ``GetAutofeedSettings`` method. + + Attributes: + name (str): + Required. The resource name of the autofeed settings. + Format: ``accounts/{account}/autofeedSettings`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAutofeedSettingsRequest(proto.Message): + r"""Request message for the ``UpdateAutofeedSettings`` method. + + Attributes: + autofeed_settings (google.shopping.merchant_accounts_v1beta.types.AutofeedSettings): + Required. The new version of the autofeed + setting. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. List of fields being updated. 
+ """ + + autofeed_settings: "AutofeedSettings" = proto.Field( + proto.MESSAGE, + number=1, + message="AutofeedSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py index ab8e68e599c3..3c20aa2d7756 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/businessinfo.py @@ -65,6 +65,12 @@ class BusinessInfo(proto.Message): business. This field is a member of `oneof`_ ``_customer_service``. + korean_business_registration_number (str): + Optional. The 10-digit `Korean business registration + number `__ + separated with dashes in the format: XXX-XX-XXXXX. + + This field is a member of `oneof`_ ``_korean_business_registration_number``. """ name: str = proto.Field( @@ -97,6 +103,11 @@ class BusinessInfo(proto.Message): optional=True, message=customerservice.CustomerService, ) + korean_business_registration_number: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) class GetBusinessInfoRequest(proto.Message): diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py index 2afe56a87508..eacef556bcd1 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/shippingsettings.py @@ -49,8 +49,8 @@ class ShippingSettings(proto.Message): - r"""The merchant account's [shipping - setting]((https://support.google.com/merchants/answer/6069284). + r"""The merchant account's `shipping + setting `__. Attributes: name (str): @@ -65,24 +65,24 @@ class ShippingSettings(proto.Message): etag (str): Required. This field is used for avoid async issue. Make sure shipping setting data - didn't change between get call and insert call. - The user should do following steps: + didn't change between get call and insert call. + The user should do following steps: - 1. Set etag field as empty string for initial + 1. Set etag field as empty string for initial shipping setting creation. - 2. After initial creation, call get method to + 2. After initial creation, call get method to obtain an etag and current shipping setting data before call insert. - 3. Modify to wanted shipping setting + 3. Modify to wanted shipping setting information. - 4. Call insert method with the wanted shipping + 4. Call insert method with the wanted shipping setting information with the etag obtained from step 2. - 5. If shipping setting data changed between step + 5. If shipping setting data changed between step 2 and step 4. Insert request will fail because the etag changes every time the shipping setting data changes. User should @@ -709,15 +709,18 @@ class DeliveryTime(proto.Message): This field is a member of `oneof`_ ``_cutoff_time``. min_handling_days (int): - Minimum number of business days spent before - an order is shipped. 0 means same day shipped, 1 - means next day shipped. 
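The five-step ``etag`` procedure described above amounts to a read-modify-write loop: read the current shipping settings to obtain an etag, build the desired settings, and insert them carrying that etag so a concurrent change is detected. A rough sketch of that flow follows; the client, method, and request field names used here (``ShippingSettingsServiceClient``, ``get_shipping_settings``, ``insert_shipping_settings``, ``InsertShippingSettingsRequest.shipping_setting``, and the resource name format) do not appear in this hunk and are assumptions about the generated surface.

from google.shopping import merchant_accounts_v1beta


def replace_shipping_settings(account_id: str):
    # All client/method/field names below are assumed; see the note above.
    client = merchant_accounts_v1beta.ShippingSettingsServiceClient()

    # Step 2: read the current settings to obtain the etag (step 1, an empty
    # etag, applies only to the very first insert for the account).
    current = client.get_shipping_settings(
        request=merchant_accounts_v1beta.GetShippingSettingsRequest(
            name=f"accounts/{account_id}/shippingSettings",
        )
    )

    # Step 3: build the desired settings (services, warehouses, etc. omitted).
    desired = merchant_accounts_v1beta.ShippingSettings(
        name=current.name,
        etag=current.etag,  # Step 4: carry the etag from the get call into insert.
    )

    # Step 5: if the stored settings changed since the get call, this insert
    # fails because the etag no longer matches; re-run get/modify/insert.
    return client.insert_shipping_settings(
        request=merchant_accounts_v1beta.InsertShippingSettingsRequest(
            parent=f"accounts/{account_id}",
            shipping_setting=desired,
        )
    )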
+ Minimum number of business days spent before an order is + shipped. 0 means same day shipped, 1 means next day shipped. + 'min_handling_days' and 'max_handling_days' should be either + set or not set at the same time. This field is a member of `oneof`_ ``_min_handling_days``. max_handling_days (int): Maximum number of business days spent before an order is shipped. 0 means same day shipped, 1 means next day shipped. Must be greater than or equal to ``min_handling_days``. + 'min_handling_days' and 'max_handling_days' should be either + set or not set at the same time. This field is a member of `oneof`_ ``_max_handling_days``. transit_time_table (google.shopping.merchant_accounts_v1beta.types.TransitTable): diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py index b2e6d71a77e1..222139600000 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofservice.py @@ -109,13 +109,13 @@ class RetrieveLatestTermsOfServiceRequest(proto.Message): Attributes: region_code (str): - Region code as defined by + Required. Region code as defined by `CLDR `__. This is either a country when the ToS applies specifically to that country or 001 when it applies globally. kind (google.shopping.merchant_accounts_v1beta.types.TermsOfServiceKind): - The Kind this terms of service version - applies to. + Required. The Kind this terms of service + version applies to. """ region_code: str = proto.Field( diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py index 15b09f8d665b..d9ffe986c203 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1beta/types/termsofserviceagreementstate.py @@ -66,6 +66,8 @@ class TermsOfServiceAgreementState(proto.Message): Identifier. The resource name of the terms of service version. Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: ``{TermsOfServiceKind}-{country}`` + For example, an identifier could be: ``MERCHANT_CENTER-US`` region_code (str): Region code as defined by https://cldr.unicode.org/. This is the country @@ -186,6 +188,7 @@ class GetTermsOfServiceAgreementStateRequest(proto.Message): Required. The resource name of the terms of service version. 
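With ``region_code`` and ``kind`` now documented as required on ``RetrieveLatestTermsOfServiceRequest``, the typical flow is to retrieve the latest terms for a region and accept them by resource name, after which the agreement state is readable under the ``{TermsOfServiceKind}-{country}`` identifier described above. A hedged sketch under those assumptions: the ``TermsOfServiceServiceClient`` and ``AcceptTermsOfServiceRequest`` names and the ``TermsOfService.name`` response field are not shown in this hunk, while the ``accept_terms_of_service`` parameters are taken from the keyword fixup table later in this patch.

from google.shopping import merchant_accounts_v1beta


def accept_latest_terms_of_service(account_id: str, region_code: str) -> None:
    client = merchant_accounts_v1beta.TermsOfServiceServiceClient()  # class name assumed

    # Both fields are now required; a region_code of "001" targets the
    # globally applicable terms of service.
    latest = client.retrieve_latest_terms_of_service(
        request=merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest(
            region_code=region_code,
            kind="MERCHANT_CENTER",
        )
    )

    # Accept by resource name, format ``termsOfService/{version}``.
    client.accept_terms_of_service(
        request=merchant_accounts_v1beta.AcceptTermsOfServiceRequest(  # type name assumed
            name=latest.name,  # response field assumed
            account=f"accounts/{account_id}",
            region_code=region_code,
        )
    )
    # The resulting state can then be read at
    # accounts/{account}/termsOfServiceAgreementState/MERCHANT_CENTER-{country},
    # per the identifier format documented above.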
Format: ``accounts/{account}/termsOfServiceAgreementState/{identifier}`` + The identifier format is: ``{TermsOfServiceKind}-{country}`` """ name: str = proto.Field( diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py new file mode 100644 index 000000000000..d207adb015ce --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_get_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py new file mode 100644 index 000000000000..d0cadcbcce63 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_get_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceClient() + + # Initialize request argument(s) + request = merchant_accounts_v1beta.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py new file mode 100644 index 000000000000..68d50c7746e8 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +async def sample_update_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient() + + # Initialize request argument(s) + autofeed_settings = merchant_accounts_v1beta.AutofeedSettings() + autofeed_settings.enable_products = True + + request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest( + autofeed_settings=autofeed_settings, + ) + + # Make the request + response = await client.update_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_async] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py new file mode 100644 index 000000000000..98f5f24a8c97 --- /dev/null +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAutofeedSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-accounts + + +# [START merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_accounts_v1beta + + +def sample_update_autofeed_settings(): + # Create a client + client = merchant_accounts_v1beta.AutofeedSettingsServiceClient() + + # Initialize request argument(s) + autofeed_settings = merchant_accounts_v1beta.AutofeedSettings() + autofeed_settings.enable_products = True + + request = merchant_accounts_v1beta.UpdateAutofeedSettingsRequest( + autofeed_settings=autofeed_settings, + ) + + # Make the request + response = client.update_autofeed_settings(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_sync] diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py index f41dc0f89b50..918d61da5083 100644 --- a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_async.py @@ -40,6 +40,8 @@ async def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py index c9cc12dbc4eb..afb5673b7735 100644 --- a/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/merchantapi_v1beta_generated_terms_of_service_service_retrieve_latest_terms_of_service_sync.py @@ -40,6 +40,8 @@ def sample_retrieve_latest_terms_of_service(): # Initialize request argument(s) request = merchant_accounts_v1beta.RetrieveLatestTermsOfServiceRequest( + region_code="region_code_value", + kind="MERCHANT_CENTER", ) # Make the request diff --git a/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json index a6cc2aef08d1..91ea23ca0671 100644 --- a/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json +++ b/packages/google-shopping-merchant-accounts/samples/generated_samples/snippet_metadata_google.shopping.merchant.accounts.v1beta.json @@ -1615,6 +1615,336 @@ ], "title": "merchantapi_v1beta_generated_accounts_service_update_account_sync.py" }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient", + "shortName": "AutofeedSettingsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient.get_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.GetAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "GetAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "get_autofeed_settings" + }, + "description": "Sample for GetAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient", + "shortName": "AutofeedSettingsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient.get_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.GetAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "GetAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.GetAutofeedSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "get_autofeed_settings" + }, + "description": "Sample for GetAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_GetAutofeedSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_get_autofeed_settings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient", + "shortName": "AutofeedSettingsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceAsyncClient.update_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.UpdateAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "UpdateAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest" + }, + { + "name": "autofeed_settings", + "type": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "update_autofeed_settings" + }, + "description": "Sample for UpdateAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient", + "shortName": "AutofeedSettingsServiceClient" + }, + "fullName": "google.shopping.merchant_accounts_v1beta.AutofeedSettingsServiceClient.update_autofeed_settings", + "method": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService.UpdateAutofeedSettings", + "service": { + "fullName": "google.shopping.merchant.accounts.v1beta.AutofeedSettingsService", + "shortName": "AutofeedSettingsService" + }, + "shortName": "UpdateAutofeedSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_accounts_v1beta.types.UpdateAutofeedSettingsRequest" + }, + { + "name": "autofeed_settings", + "type": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.shopping.merchant_accounts_v1beta.types.AutofeedSettings", + "shortName": "update_autofeed_settings" + }, + "description": "Sample for UpdateAutofeedSettings", + "file": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_AutofeedSettingsService_UpdateAutofeedSettings_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_autofeed_settings_service_update_autofeed_settings_sync.py" + }, { "canonical": true, "clientMethod": { @@ -6027,12 +6357,12 @@ "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_async", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -6042,18 +6372,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -6103,12 +6433,12 @@ "regionTag": "merchantapi_v1beta_generated_TermsOfServiceService_RetrieveLatestTermsOfService_sync", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -6118,18 +6448,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py b/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py index fb344faa3a22..a7c0d0a5f668 100644 --- a/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py +++ b/packages/google-shopping-merchant-accounts/scripts/fixup_merchant_accounts_v1beta_keywords.py @@ -41,16 +41,17 @@ class merchant_accountsCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'accept_terms_of_service': ('name', 'account', 'region_code', ), 'claim_homepage': ('name', ), - 'create_and_configure_account': ('account', 'users', 'accept_terms_of_service', 'service', ), + 'create_and_configure_account': ('account', 'service', 'users', 'accept_terms_of_service', ), 'create_region': ('parent', 'region_id', 'region', ), 'create_user': ('parent', 'user_id', 'user', ), - 'delete_account': ('name', ), + 'delete_account': ('name', 'force', ), 'delete_region': ('name', ), 'delete_user': ('name', ), 'disable_program': ('name', ), 'enable_program': ('name', ), 'get_account': ('name', ), 'get_account_tax': ('name', ), + 'get_autofeed_settings': ('name', ), 'get_business_identity': ('name', ), 'get_business_info': ('name', ), 'get_email_preferences': ('name', ), @@ -76,6 +77,7 @@ class 
merchant_accountsCallTransformer(cst.CSTTransformer): 'unclaim_homepage': ('name', ), 'update_account': ('account', 'update_mask', ), 'update_account_tax': ('account_tax', 'update_mask', ), + 'update_autofeed_settings': ('autofeed_settings', 'update_mask', ), 'update_business_identity': ('business_identity', 'update_mask', ), 'update_business_info': ('business_info', 'update_mask', ), 'update_email_preferences': ('email_preferences', 'update_mask', ), diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py index 9cfff1670b0a..800b1a6017e0 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_account_issue_service.py @@ -35,7 +35,6 @@ from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.protobuf import json_format -from google.type import datetime_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -1260,6 +1259,7 @@ def test_list_account_issues_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", language_code="language_code_value", + time_zone="time_zone_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1276,6 +1276,7 @@ def test_list_account_issues_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", language_code="language_code_value", + time_zone="time_zone_value", ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py index c5e99b1fa6eb..c688a4d567ce 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_accounts_service.py @@ -34,7 +34,6 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.type import datetime_pb2 # type: ignore @@ -52,7 +51,12 @@ pagers, transports, ) -from google.shopping.merchant_accounts_v1beta.types import accessright, accounts, user +from google.shopping.merchant_accounts_v1beta.types import ( + accessright, + accounts, + accountservices, + user, +) def client_cert_source_callback(): @@ -4006,7 +4010,15 @@ def test_create_and_configure_account_rest_unset_required_fields(): ) unset_fields = transport.create_and_configure_account._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("account",))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "account", + "service", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -4192,6 +4204,8 @@ def test_delete_account_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() 
).delete_account._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4242,7 +4256,7 @@ def test_delete_account_rest_unset_required_fields(): ) unset_fields = transport.delete_account._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("force",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py new file mode 100644 index 000000000000..0c01e92c6a8c --- /dev/null +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_autofeed_settings_service.py @@ -0,0 +1,3474 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service import ( + AutofeedSettingsServiceAsyncClient, + AutofeedSettingsServiceClient, + transports, +) +from google.shopping.merchant_accounts_v1beta.types import autofeedsettings + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AutofeedSettingsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + AutofeedSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AutofeedSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AutofeedSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = 
mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AutofeedSettingsServiceClient._get_client_cert_source(None, False) is None + assert ( + AutofeedSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + AutofeedSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AutofeedSettingsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AutofeedSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AutofeedSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + AutofeedSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AutofeedSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AutofeedSettingsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AutofeedSettingsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AutofeedSettingsServiceClient._get_universe_domain(None, None) + == AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AutofeedSettingsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AutofeedSettingsServiceClient, "grpc"), + (AutofeedSettingsServiceAsyncClient, "grpc_asyncio"), + (AutofeedSettingsServiceClient, "rest"), + ], +) +def test_autofeed_settings_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AutofeedSettingsServiceGrpcTransport, "grpc"), + (transports.AutofeedSettingsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AutofeedSettingsServiceRestTransport, "rest"), + ], +) +def test_autofeed_settings_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AutofeedSettingsServiceClient, "grpc"), + (AutofeedSettingsServiceAsyncClient, "grpc_asyncio"), + (AutofeedSettingsServiceClient, "rest"), + ], +) +def test_autofeed_settings_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com/" + ) + + +def test_autofeed_settings_service_client_get_transport_class(): + transport = AutofeedSettingsServiceClient.get_transport_class() + available_transports = [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceRestTransport, + ] + assert transport in available_transports + + transport = AutofeedSettingsServiceClient.get_transport_class("grpc") + assert transport == transports.AutofeedSettingsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +def test_autofeed_settings_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AutofeedSettingsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AutofeedSettingsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + "true", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + "false", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + "true", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + "false", + ), 
+ ], +) +@mock.patch.object( + AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_autofeed_settings_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AutofeedSettingsServiceClient, AutofeedSettingsServiceAsyncClient] +) +@mock.patch.object( + AutofeedSettingsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AutofeedSettingsServiceAsyncClient), +) +def test_autofeed_settings_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
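+    # With GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and a discoverable default
+    # client cert, the helper should return the mTLS endpoint and that cert source.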
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [AutofeedSettingsServiceClient, AutofeedSettingsServiceAsyncClient] +) +@mock.patch.object( + AutofeedSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceClient), +) +@mock.patch.object( + AutofeedSettingsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AutofeedSettingsServiceAsyncClient), +) +def test_autofeed_settings_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AutofeedSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AutofeedSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + ), + ], +) +def test_autofeed_settings_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_autofeed_settings_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_autofeed_settings_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AutofeedSettingsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_autofeed_settings_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
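+    # create_channel should receive the credentials loaded from the file
+    # (file_creds below) rather than ADC, showing credentials_file takes precedence.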
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=None, + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.GetAutofeedSettingsRequest, + dict, + ], +) +def test_get_autofeed_settings(request_type, transport: str = "grpc"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + response = client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = autofeedsettings.GetAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_get_autofeed_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.GetAutofeedSettingsRequest() + + +def test_get_autofeed_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = autofeedsettings.GetAutofeedSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_autofeed_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.GetAutofeedSettingsRequest( + name="name_value", + ) + + +def test_get_autofeed_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_autofeed_settings + ] = mock_rpc + request = {} + client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
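+        # The async client awaits the transport call, so the mock returns an
+        # awaitable FakeUnaryUnaryCall that resolves to the response message.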
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.get_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.GetAutofeedSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_autofeed_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_autofeed_settings + ] = mock_rpc + + request = {} + await client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_async( + transport: str = "grpc_asyncio", + request_type=autofeedsettings.GetAutofeedSettingsRequest, +): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = autofeedsettings.GetAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_async_from_dict(): + await test_get_autofeed_settings_async(request_type=dict) + + +def test_get_autofeed_settings_field_headers(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
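+    # Because "name" is part of the request URI, it should be attached as the
+    # x-goog-request-params routing header, which is asserted at the end of this test.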
+ request = autofeedsettings.GetAutofeedSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value = autofeedsettings.AutofeedSettings() + client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_field_headers_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autofeedsettings.GetAutofeedSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + await client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_autofeed_settings_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_autofeed_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_autofeed_settings_flattened_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_autofeed_settings( + autofeedsettings.GetAutofeedSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_flattened_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
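+        # call.return_value is assigned twice below; only the second, awaitable
+        # FakeUnaryUnaryCall takes effect for the async client under test.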
+ call.return_value = autofeedsettings.AutofeedSettings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_autofeed_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_autofeed_settings_flattened_error_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_autofeed_settings( + autofeedsettings.GetAutofeedSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.UpdateAutofeedSettingsRequest, + dict, + ], +) +def test_update_autofeed_settings(request_type, transport: str = "grpc"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + response = client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = autofeedsettings.UpdateAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_update_autofeed_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.UpdateAutofeedSettingsRequest() + + +def test_update_autofeed_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = autofeedsettings.UpdateAutofeedSettingsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_autofeed_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.UpdateAutofeedSettingsRequest() + + +def test_update_autofeed_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_autofeed_settings + ] = mock_rpc + request = {} + client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.update_autofeed_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autofeedsettings.UpdateAutofeedSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_autofeed_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_autofeed_settings + ] = mock_rpc + + request = {} + await client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_async( + transport: str = "grpc_asyncio", + request_type=autofeedsettings.UpdateAutofeedSettingsRequest, +): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + ) + response = await client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = autofeedsettings.UpdateAutofeedSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_async_from_dict(): + await test_update_autofeed_settings_async(request_type=dict) + + +def test_update_autofeed_settings_field_headers(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = autofeedsettings.UpdateAutofeedSettingsRequest() + + request.autofeed_settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value = autofeedsettings.AutofeedSettings() + client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "autofeed_settings.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_field_headers_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autofeedsettings.UpdateAutofeedSettingsRequest() + + request.autofeed_settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + await client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "autofeed_settings.name=name_value", + ) in kw["metadata"] + + +def test_update_autofeed_settings_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_autofeed_settings( + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].autofeed_settings + mock_val = autofeedsettings.AutofeedSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_autofeed_settings_flattened_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_autofeed_settings( + autofeedsettings.UpdateAutofeedSettingsRequest(), + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_flattened_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autofeed_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autofeedsettings.AutofeedSettings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autofeedsettings.AutofeedSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_autofeed_settings( + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].autofeed_settings + mock_val = autofeedsettings.AutofeedSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_autofeed_settings_flattened_error_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_autofeed_settings( + autofeedsettings.UpdateAutofeedSettingsRequest(), + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.GetAutofeedSettingsRequest, + dict, + ], +) +def test_get_autofeed_settings_rest(request_type): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/autofeedSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_autofeed_settings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_get_autofeed_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_autofeed_settings + ] = mock_rpc + + request = {} + client.get_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_autofeed_settings_rest_required_fields( + request_type=autofeedsettings.GetAutofeedSettingsRequest, +): + transport_class = transports.AutofeedSettingsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autofeed_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autofeed_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_autofeed_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_autofeed_settings_rest_unset_required_fields(): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_autofeed_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_autofeed_settings_rest_interceptors(null_interceptor): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AutofeedSettingsServiceRestInterceptor(), + ) + client = AutofeedSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, "post_get_autofeed_settings" + ) as post, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, "pre_get_autofeed_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = autofeedsettings.GetAutofeedSettingsRequest.pb( + autofeedsettings.GetAutofeedSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = autofeedsettings.AutofeedSettings.to_json( + autofeedsettings.AutofeedSettings() + ) + + request = autofeedsettings.GetAutofeedSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = autofeedsettings.AutofeedSettings() + + client.get_autofeed_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_autofeed_settings_rest_bad_request( + transport: str = "rest", request_type=autofeedsettings.GetAutofeedSettingsRequest +): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/autofeedSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
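+    # The REST transport is expected to surface the mocked 400 response as
+    # core_exceptions.BadRequest, which the pytest.raises block checks.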
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_autofeed_settings(request) + + +def test_get_autofeed_settings_rest_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/autofeedSettings"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_autofeed_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{name=accounts/*/autofeedSettings}" + % client.transport._host, + args[1], + ) + + +def test_get_autofeed_settings_rest_flattened_error(transport: str = "rest"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_autofeed_settings( + autofeedsettings.GetAutofeedSettingsRequest(), + name="name_value", + ) + + +def test_get_autofeed_settings_rest_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autofeedsettings.UpdateAutofeedSettingsRequest, + dict, + ], +) +def test_update_autofeed_settings_rest(request_type): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"autofeed_settings": {"name": "accounts/sample1/autofeedSettings"}} + request_init["autofeed_settings"] = { + "name": "accounts/sample1/autofeedSettings", + "enable_products": True, + "eligible": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = autofeedsettings.UpdateAutofeedSettingsRequest.meta.fields[ + "autofeed_settings" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autofeed_settings"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autofeed_settings"][field])): + del request_init["autofeed_settings"][field][i][subfield] + else: + del request_init["autofeed_settings"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings( + name="name_value", + enable_products=True, + eligible=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_autofeed_settings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autofeedsettings.AutofeedSettings) + assert response.name == "name_value" + assert response.enable_products is True + assert response.eligible is True + + +def test_update_autofeed_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_autofeed_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_autofeed_settings + ] = mock_rpc + + request = {} + client.update_autofeed_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_autofeed_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_autofeed_settings_rest_required_fields( + request_type=autofeedsettings.UpdateAutofeedSettingsRequest, +): + transport_class = transports.AutofeedSettingsServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_autofeed_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_autofeed_settings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_autofeed_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_autofeed_settings_rest_unset_required_fields(): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_autofeed_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "autofeedSettings", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_autofeed_settings_rest_interceptors(null_interceptor): + transport = transports.AutofeedSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AutofeedSettingsServiceRestInterceptor(), + ) + client = AutofeedSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, + "post_update_autofeed_settings", + ) as post, mock.patch.object( + transports.AutofeedSettingsServiceRestInterceptor, + "pre_update_autofeed_settings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = autofeedsettings.UpdateAutofeedSettingsRequest.pb( + autofeedsettings.UpdateAutofeedSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = autofeedsettings.AutofeedSettings.to_json( + autofeedsettings.AutofeedSettings() + ) + + request = autofeedsettings.UpdateAutofeedSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = autofeedsettings.AutofeedSettings() + + client.update_autofeed_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_autofeed_settings_rest_bad_request( + transport: str = "rest", request_type=autofeedsettings.UpdateAutofeedSettingsRequest +): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"autofeed_settings": {"name": "accounts/sample1/autofeedSettings"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_autofeed_settings(request) + + +def test_update_autofeed_settings_rest_flattened(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autofeedsettings.AutofeedSettings() + + # get arguments that satisfy an http rule for this method + sample_request = { + "autofeed_settings": {"name": "accounts/sample1/autofeedSettings"} + } + + # get truthy value for each flattened field + mock_args = dict( + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autofeedsettings.AutofeedSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_autofeed_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/accounts/v1beta/{autofeed_settings.name=accounts/*/autofeedSettings}" + % client.transport._host, + args[1], + ) + + +def test_update_autofeed_settings_rest_flattened_error(transport: str = "rest"): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_autofeed_settings( + autofeedsettings.UpdateAutofeedSettingsRequest(), + autofeed_settings=autofeedsettings.AutofeedSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_autofeed_settings_rest_error(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutofeedSettingsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AutofeedSettingsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AutofeedSettingsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AutofeedSettingsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + transports.AutofeedSettingsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AutofeedSettingsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AutofeedSettingsServiceGrpcTransport, + ) + + +def test_autofeed_settings_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AutofeedSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_autofeed_settings_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AutofeedSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_autofeed_settings", + "update_autofeed_settings", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_autofeed_settings_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutofeedSettingsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_autofeed_settings_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_accounts_v1beta.services.autofeed_settings_service.transports.AutofeedSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutofeedSettingsServiceTransport() + adc.assert_called_once() + + +def test_autofeed_settings_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AutofeedSettingsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + transports.AutofeedSettingsServiceRestTransport, + ], +) +def test_autofeed_settings_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com/" + api_audience_tests = [None, "https://language2.com/"] + api_audience_expect = [host, "https://language2.com/"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutofeedSettingsServiceGrpcTransport, grpc_helpers), + (transports.AutofeedSettingsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_autofeed_settings_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "merchantapi.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/content",), + scopes=["1", "2"], + default_host="merchantapi.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_autofeed_settings_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AutofeedSettingsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_autofeed_settings_service_host_no_port(transport_name): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_autofeed_settings_service_host_with_port(transport_name): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="merchantapi.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "merchantapi.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_autofeed_settings_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AutofeedSettingsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AutofeedSettingsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_autofeed_settings._session + session2 = client2.transport.get_autofeed_settings._session + assert session1 != session2 + session1 = client1.transport.update_autofeed_settings._session + session2 = client2.transport.update_autofeed_settings._session + assert session1 != session2 + + +def test_autofeed_settings_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AutofeedSettingsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_autofeed_settings_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.AutofeedSettingsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AutofeedSettingsServiceGrpcTransport, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ], +) +def test_autofeed_settings_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_autofeed_settings_path(): + account = "squid" + expected = "accounts/{account}/autofeedSettings".format( + account=account, + ) + actual = AutofeedSettingsServiceClient.autofeed_settings_path(account) + assert expected == actual + + +def test_parse_autofeed_settings_path(): + expected = { + "account": "clam", + } + path = AutofeedSettingsServiceClient.autofeed_settings_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_autofeed_settings_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AutofeedSettingsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = AutofeedSettingsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AutofeedSettingsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = AutofeedSettingsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AutofeedSettingsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = AutofeedSettingsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AutofeedSettingsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = AutofeedSettingsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = AutofeedSettingsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AutofeedSettingsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = AutofeedSettingsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AutofeedSettingsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AutofeedSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AutofeedSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AutofeedSettingsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AutofeedSettingsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AutofeedSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + AutofeedSettingsServiceClient, + transports.AutofeedSettingsServiceGrpcTransport, + ), + ( + AutofeedSettingsServiceAsyncClient, + transports.AutofeedSettingsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py index 15afcf6bc479..e2e4eb7c38fb 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_business_info_service.py @@ -1216,6 +1216,7 @@ def test_get_business_info(request_type, transport: str = "grpc"): call.return_value = businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) response = client.get_business_info(request) @@ -1232,6 +1233,10 @@ def test_get_business_info(request_type, transport: str = "grpc"): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_get_business_info_empty_call(): @@ -1340,6 +1345,7 @@ async def test_get_business_info_empty_call_async(): businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.get_business_info() @@ -1412,6 +1418,7 @@ async def test_get_business_info_async( businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.get_business_info(request) @@ -1429,6 +1436,10 @@ async def test_get_business_info_async( response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) 
@pytest.mark.asyncio @@ -1612,6 +1623,7 @@ def test_update_business_info(request_type, transport: str = "grpc"): call.return_value = businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) response = client.update_business_info(request) @@ -1628,6 +1640,10 @@ def test_update_business_info(request_type, transport: str = "grpc"): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_update_business_info_empty_call(): @@ -1734,6 +1750,7 @@ async def test_update_business_info_empty_call_async(): businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.update_business_info() @@ -1806,6 +1823,7 @@ async def test_update_business_info_async( businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) ) response = await client.update_business_info(request) @@ -1823,6 +1841,10 @@ async def test_update_business_info_async( response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) @pytest.mark.asyncio @@ -2014,6 +2036,7 @@ def test_get_business_info_rest(request_type): return_value = businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) # Wrap the value into a proper Response obj @@ -2034,6 +2057,10 @@ def test_get_business_info_rest(request_type): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_get_business_info_rest_use_cached_wrapped_rpc(): @@ -2341,6 +2368,7 @@ def test_update_business_info_rest(request_type): }, "phone_verification_state": 1, "customer_service": {"uri": "uri_value", "email": "email_value", "phone": {}}, + "korean_business_registration_number": "korean_business_registration_number_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -2417,6 +2445,7 @@ def get_message_fields(field): return_value = businessinfo.BusinessInfo( name="name_value", phone_verification_state=phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED, + korean_business_registration_number="korean_business_registration_number_value", ) # Wrap the value into a proper Response obj @@ -2437,6 +2466,10 @@ def get_message_fields(field): response.phone_verification_state == phoneverificationstate.PhoneVerificationState.PHONE_VERIFICATION_STATE_VERIFIED ) + assert ( + response.korean_business_registration_number + == "korean_business_registration_number_value" + ) def test_update_business_info_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py index e50f9373a6c8..4ea60222ebc9 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1beta/test_terms_of_service_service.py @@ -2637,6 +2637,120 @@ def test_retrieve_latest_terms_of_service_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 +def test_retrieve_latest_terms_of_service_rest_required_fields( + request_type=termsofservice.RetrieveLatestTermsOfServiceRequest, +): + transport_class = transports.TermsOfServiceServiceRestTransport + + request_init = {} + request_init["region_code"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "regionCode" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_latest_terms_of_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "regionCode" in jsonified_request + assert jsonified_request["regionCode"] == request_init["region_code"] + + jsonified_request["regionCode"] = "region_code_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_latest_terms_of_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "kind", + "region_code", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "regionCode" in jsonified_request + assert jsonified_request["regionCode"] == "region_code_value" + + client = TermsOfServiceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = termsofservice.TermsOfService() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = termsofservice.TermsOfService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retrieve_latest_terms_of_service(request) + + expected_params = [ + ( + "regionCode", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retrieve_latest_terms_of_service_rest_unset_required_fields(): + transport = transports.TermsOfServiceServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.retrieve_latest_terms_of_service._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "kind", + "regionCode", + ) + ) + & set( + ( + "regionCode", + "kind", + ) + ) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_retrieve_latest_terms_of_service_rest_interceptors(null_interceptor): transport = transports.TermsOfServiceServiceRestTransport( From c38431b363fd4f18bb692593f401e3ac3759637c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:58:20 -0400 Subject: [PATCH 095/108] feat: [google-cloud-channel] Add support for primary_admin_email as customer_identity for ImportCustomer (#13126) BEGIN_COMMIT_OVERRIDE feat: Add support for primary_admin_email as customer_identity for ImportCustomer feat: Add support for importing team customer from a different reseller feat: Add support to look up team customer Cloud Identity information docs: Clarify the expected value of the domain field for team type customers END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
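For orientation only (this note and sketch are editorial and not part of the generated change): the feature list above adds a `primary_admin_email` field to `CheckCloudIdentityAccountsExistRequest` and to the `customer_identity` oneof of `ImportCustomerRequest` in `google-cloud-channel`, as shown in the `types/service.py` diff further down. A minimal usage sketch follows, assuming application default credentials are configured; the reseller account name, domain, and email address are placeholder values.

    from google.cloud import channel_v1

    def import_team_customer_by_admin_email() -> channel_v1.Customer:
        # Placeholder reseller account resource name; real callers supply their own.
        parent = "accounts/C012345"

        client = channel_v1.CloudChannelServiceClient()

        # Look up whether a Cloud Identity account already exists for the team
        # customer; primary_admin_email is the field added by this change.
        exist_request = channel_v1.CheckCloudIdentityAccountsExistRequest(
            parent=parent,
            domain="example.com",
            primary_admin_email="admin@example.com",
        )
        accounts = client.check_cloud_identity_accounts_exist(request=exist_request)

        # Import the customer using primary_admin_email as the customer_identity.
        import_request = channel_v1.ImportCustomerRequest(
            parent=parent,
            primary_admin_email="admin@example.com",
            overwrite_if_exists=False,
        )
        return client.import_customer(request=import_request)

Per the proto definition in the diff below, `domain`, `cloud_identity_id`, and `primary_admin_email` form the `customer_identity` oneof on `ImportCustomerRequest`, so exactly one of them should be set per request.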
feat: Add support for importing team customer from a different reseller feat: Add support to look up team customer Cloud Identity information docs: Clarify the expected value of the domain field for team type customers PiperOrigin-RevId: 682051698 Source-Link: https://github.com/googleapis/googleapis/commit/b6a27d13a2f0223051ef720e4e9d0d52323560e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/efd321b69d8e4032c2690c19e8131d7b1702f977 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNoYW5uZWwvLk93bEJvdC55YW1sIiwiaCI6ImVmZDMyMWI2OWQ4ZTQwMzJjMjY5MGMxOWU4MTMxZDdiMTcwMmY5NzcifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/channel_v1/types/service.py | 34 ++++++++++++++++++- .../scripts/fixup_channel_v1_keywords.py | 4 +-- .../channel_v1/test_cloud_channel_service.py | 4 +++ 3 files changed, 39 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py index c8ae1f8d383f..9738f170a601 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py @@ -127,7 +127,12 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): the format: accounts/{account_id} domain (str): Required. Domain to fetch for Cloud Identity - account customer. + account customers, including domain and team + customers. For team customers, please use the + domain for their emails. + primary_admin_email (str): + Optional. Primary admin email to fetch for + Cloud Identity account team customer. """ parent: str = proto.Field( @@ -138,6 +143,10 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): proto.STRING, number=2, ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=4, + ) class CloudIdentityCustomerAccount(proto.Message): @@ -159,6 +168,11 @@ class CloudIdentityCustomerAccount(proto.Message): customer_cloud_identity_id (str): If existing = true, the Cloud Identity ID of the customer. + customer_type (google.cloud.channel_v1.types.CloudIdentityInfo.CustomerType): + If existing = true, the type of the customer. + channel_partner_cloud_identity_id (str): + If existing = true, and is 2-tier customer, + the channel partner of the customer. """ existing: bool = proto.Field( @@ -177,6 +191,15 @@ class CloudIdentityCustomerAccount(proto.Message): proto.STRING, number=4, ) + customer_type: common.CloudIdentityInfo.CustomerType = proto.Field( + proto.ENUM, + number=5, + enum=common.CloudIdentityInfo.CustomerType, + ) + channel_partner_cloud_identity_id: str = proto.Field( + proto.STRING, + number=6, + ) class CheckCloudIdentityAccountsExistResponse(proto.Message): @@ -373,6 +396,10 @@ class ImportCustomerRequest(proto.Message): cloud_identity_id (str): Required. Customer's Cloud Identity ID + This field is a member of `oneof`_ ``customer_identity``. + primary_admin_email (str): + Required. Customer's primary admin email. + This field is a member of `oneof`_ ``customer_identity``. parent (str): Required. The resource name of the reseller's account. 
@@ -413,6 +440,11 @@ class ImportCustomerRequest(proto.Message): number=3, oneof="customer_identity", ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=8, + oneof="customer_identity", + ) parent: str = proto.Field( proto.STRING, number=1, diff --git a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py index 7c3e175a35d5..a7022924a590 100644 --- a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py +++ b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py @@ -44,7 +44,7 @@ class channelCallTransformer(cst.CSTTransformer): 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', 'billing_account', ), 'change_parameters': ('name', 'parameters', 'request_id', 'purchase_order_id', ), 'change_renewal_settings': ('name', 'renewal_settings', 'request_id', ), - 'check_cloud_identity_accounts_exist': ('parent', 'domain', ), + 'check_cloud_identity_accounts_exist': ('parent', 'domain', 'primary_admin_email', ), 'create_channel_partner_link': ('parent', 'channel_partner_link', ), 'create_channel_partner_repricing_config': ('parent', 'channel_partner_repricing_config', ), 'create_customer': ('parent', 'customer', ), @@ -59,7 +59,7 @@ class channelCallTransformer(cst.CSTTransformer): 'get_customer': ('name', ), 'get_customer_repricing_config': ('name', ), 'get_entitlement': ('name', ), - 'import_customer': ('domain', 'cloud_identity_id', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), + 'import_customer': ('domain', 'cloud_identity_id', 'primary_admin_email', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), 'list_channel_partner_links': ('parent', 'page_size', 'page_token', 'view', ), 'list_channel_partner_repricing_configs': ('parent', 'page_size', 'page_token', 'filter', ), 'list_customer_repricing_configs': ('parent', 'page_size', 'page_token', 'filter', ), diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index f454d78314e5..b6ab34645279 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -2110,6 +2110,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat request = service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2125,6 +2126,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat assert args[0] == service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) @@ -3397,6 +3399,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): request = service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", @@ -3414,6 +3417,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): assert args[0] == service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", From eeab5c1ee68559605ab3dc251314d45bd92191cd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 12:24:29 -0400 Subject: [PATCH 096/108] feat: [google-cloud-documentai] Removed deprecated api versions (#13125) BEGIN_COMMIT_OVERRIDE feat: Removed deprecated api versions END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. PiperOrigin-RevId: 681956159 Source-Link: https://github.com/googleapis/googleapis/commit/ff4436beeb10715bedeb54da24f87a3b4dcddcea Source-Link: https://github.com/googleapis/googleapis-gen/commit/1b41353956190b52884e5bcc7eceda2b9a92f756 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRvY3VtZW50YWkvLk93bEJvdC55YW1sIiwiaCI6IjFiNDEzNTM5NTYxOTBiNTI4ODRlNWJjYzdlY2VkYTJiOWE5MmY3NTYifQ== --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/documentai/gapic_version.py | 2 +- .../cloud/documentai_v1/gapic_version.py | 2 +- .../cloud/documentai_v1beta3/gapic_version.py | 2 +- ...g_service_batch_process_documents_async.py | 60 ---- ...ng_service_batch_process_documents_sync.py | 60 ---- ...standing_service_process_document_async.py | 56 --- ...rstanding_service_process_document_sync.py | 56 --- ...t_metadata_google.cloud.documentai.v1.json | 2 +- ...adata_google.cloud.documentai.v1beta2.json | 329 ------------------ ...adata_google.cloud.documentai.v1beta3.json | 2 +- 10 files changed, 5 insertions(+), 566 deletions(-) delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py delete mode 100644 packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ 
b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index c82b1e137507..558c8aab67c5 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py deleted file mode 100644 index 460ca2fce376..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchProcessDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -async def sample_batch_process_documents(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceAsyncClient() - - # Initialize request argument(s) - requests = documentai_v1beta2.ProcessDocumentRequest() - requests.input_config.gcs_source.uri = "uri_value" - requests.input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.BatchProcessDocumentsRequest( - requests=requests, - ) - - # Make the request - operation = client.batch_process_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async] diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py deleted file mode 100644 index b381b928b678..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchProcessDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -def sample_batch_process_documents(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceClient() - - # Initialize request argument(s) - requests = documentai_v1beta2.ProcessDocumentRequest() - requests.input_config.gcs_source.uri = "uri_value" - requests.input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.BatchProcessDocumentsRequest( - requests=requests, - ) - - # Make the request - operation = client.batch_process_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync] diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py deleted file mode 100644 index 54eef08269df..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ProcessDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -async def sample_process_document(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceAsyncClient() - - # Initialize request argument(s) - input_config = documentai_v1beta2.InputConfig() - input_config.gcs_source.uri = "uri_value" - input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.ProcessDocumentRequest( - input_config=input_config, - ) - - # Make the request - response = await client.process_document(request=request) - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_async] diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py deleted file mode 100644 index f81a39312de1..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_process_document_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ProcessDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai - - -# [START documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 - - -def sample_process_document(): - # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceClient() - - # Initialize request argument(s) - input_config = documentai_v1beta2.InputConfig() - input_config.gcs_source.uri = "uri_value" - input_config.mime_type = "mime_type_value" - - request = documentai_v1beta2.ProcessDocumentRequest( - input_config=input_config, - ) - - # Make the request - response = client.process_document(request=request) - - # Handle the response - print(response) - -# [END documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_sync] diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 2fc98b45f209..96d60af285a2 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json deleted file mode 100644 index 31e4348ff0cb..000000000000 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ /dev/null @@ -1,329 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.documentai.v1beta2", - "version": "v1beta2" - } - ], - "language": "PYTHON", - "name": "google-cloud-documentai", - "version": "2.32.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient", - "shortName": "DocumentUnderstandingServiceAsyncClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient.batch_process_documents", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.BatchProcessDocuments", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "BatchProcessDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.BatchProcessDocumentsRequest" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.documentai_v1beta2.types.ProcessDocumentRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_process_documents" - }, - "description": "Sample for BatchProcessDocuments", - "file": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient", - "shortName": "DocumentUnderstandingServiceClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient.batch_process_documents", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.BatchProcessDocuments", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "BatchProcessDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.BatchProcessDocumentsRequest" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.documentai_v1beta2.types.ProcessDocumentRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_process_documents" - }, - "description": "Sample for BatchProcessDocuments", - "file": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient", - "shortName": "DocumentUnderstandingServiceAsyncClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceAsyncClient.process_document", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.ProcessDocument", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "ProcessDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.ProcessDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.documentai_v1beta2.types.Document", - "shortName": "process_document" - }, - "description": "Sample for ProcessDocument", - "file": "documentai_v1beta2_generated_document_understanding_service_process_document_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_process_document_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient", - "shortName": "DocumentUnderstandingServiceClient" - }, - "fullName": "google.cloud.documentai_v1beta2.DocumentUnderstandingServiceClient.process_document", - "method": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService.ProcessDocument", - "service": { - "fullName": "google.cloud.documentai.v1beta2.DocumentUnderstandingService", - "shortName": "DocumentUnderstandingService" - }, - "shortName": "ProcessDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.documentai_v1beta2.types.ProcessDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.documentai_v1beta2.types.Document", - "shortName": "process_document" - }, - "description": "Sample for ProcessDocument", - "file": "documentai_v1beta2_generated_document_understanding_service_process_document_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "documentai_v1beta2_generated_DocumentUnderstandingService_ProcessDocument_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "documentai_v1beta2_generated_document_understanding_service_process_document_sync.py" - } - ] -} diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index 43bcd3c8902f..f47545a8ed3e 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { From 3881914b43b47bf2ee187f62447ef9eccc851749 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 13:33:30 
-0400 Subject: [PATCH 097/108] feat: [google-apps-chat] Add doc for permission settings & announcement space support (#13120) BEGIN_COMMIT_OVERRIDE feat: Removed deprecated api versions END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. feat: Add doc for import mode external users support docs: Messages API dev docs improvement docs: Memberships API dev docs improvement docs: Discoverable space docs improvement PiperOrigin-RevId: 681521060 Source-Link: https://github.com/googleapis/googleapis/commit/c472cf7c64e401e8f55353fddab1b5cd81efb607 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a905bb22c968ebdded136b282ef073992fc140c5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFwcHMtY2hhdC8uT3dsQm90LnlhbWwiLCJoIjoiYTkwNWJiMjJjOTY4ZWJkZGVkMTM2YjI4MmVmMDczOTkyZmMxNDBjNSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: ohmayr --- .../google/apps/chat/gapic_version.py | 2 +- .../google/apps/chat_v1/gapic_version.py | 2 +- .../services/chat_service/async_client.py | 271 +++++++------ .../chat_v1/services/chat_service/client.py | 273 +++++++------ .../services/chat_service/transports/grpc.py | 114 +++--- .../chat_service/transports/grpc_asyncio.py | 114 +++--- .../services/chat_service/transports/rest.py | 3 +- .../google/apps/chat_v1/types/message.py | 29 +- .../google/apps/chat_v1/types/space.py | 372 +++++++++++++----- .../google/apps/chat_v1/types/space_event.py | 5 +- ...nerated_chat_service_create_space_async.py | 4 + ...enerated_chat_service_create_space_sync.py | 4 + ...nerated_chat_service_set_up_space_async.py | 4 + ...enerated_chat_service_set_up_space_sync.py | 4 + ...nerated_chat_service_update_space_async.py | 4 + ...enerated_chat_service_update_space_sync.py | 4 + .../snippet_metadata_google.chat.v1.json | 86 ++-- .../unit/gapic/chat_v1/test_chat_service.py | 66 ++++ 18 files changed, 862 insertions(+), 499 deletions(-) diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 82774eb03431..8e88d84deba6 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -314,19 +314,36 @@ async def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication async client| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication async client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python @@ -464,9 +481,12 @@ async def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesAsyncPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. .. code-block:: python @@ -1759,8 +1779,9 @@ async def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. 
If you receive the error message ``ALREADY_EXISTS`` when @@ -1768,6 +1789,11 @@ async def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -1787,7 +1813,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -1798,16 +1828,22 @@ async def sample_create_space(): Args: request (Optional[Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (:class:`google.apps.chat_v1.types.Space`): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -1952,7 +1988,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -2042,7 +2082,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -2067,68 +2111,73 @@ async def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) - - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. - Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. 
Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. + Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. 
+ + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2495,46 +2544,25 @@ async def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. 
code-block:: python @@ -3694,6 +3722,9 @@ async def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 0d542091414a..a65d4dcb0a5a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -878,23 +878,40 @@ def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication client| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python - # This snippet has been automatically generated and should be regarded as a + code template only. # It will require modifications to work: # - It may require correct/in-range values for request initialization. @@ -1025,9 +1042,12 @@ def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__.
+ Requires `user authentication `__. .. code-block:: python @@ -2290,8 +2310,9 @@ def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -2299,6 +2320,11 @@ def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -2318,7 +2344,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -2329,16 +2359,22 @@ def sample_create_space(): Args: request (Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -2480,7 +2516,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -2568,7 +2608,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -2593,68 +2637,73 @@ def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) 
- - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. - Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. + Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. 
If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -3011,46 +3060,25 @@ def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. 
For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. code-block:: python @@ -4183,6 +4211,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index bfb0492b21e8..d02bc6784c2a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -254,19 +254,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], gc_message.Message]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -293,9 +310,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. 
Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -686,8 +706,9 @@ def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -695,6 +716,11 @@ def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -944,46 +970,25 @@ def create_membership( ) -> Callable[[gc_membership.CreateMembershipRequest], gc_membership.Membership]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. 
For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. Returns: Callable[[~.CreateMembershipRequest], @@ -1271,6 +1276,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index a404fca34305..86137f66eff8 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -258,19 +258,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], Awaitable[gc_message.Message]]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication async gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -299,9 +316,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. 
Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -698,8 +718,9 @@ def create_space( ) -> Callable[[gc_space.CreateSpaceRequest], Awaitable[gc_space.Space]]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -707,6 +728,11 @@ def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -964,46 +990,25 @@ def create_membership( ]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. 
For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. Returns: Callable[[~.CreateMembershipRequest], @@ -1301,6 +1306,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index f9c4a5cd53a3..d9717f7c33ea 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -1409,7 +1409,8 @@ def __call__( Args: request (~.gc_space.CreateSpaceRequest): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/message.py b/packages/google-apps-chat/google/apps/chat_v1/types/message.py index 90dda263a1ec..56d10d7b3574 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/message.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/message.py @@ -109,8 +109,8 @@ class Message(proto.Message): user `__, or everyone in the space. - To learn about creating text messages, see `Send a text - message `__. + To learn about creating text messages, see `Send a + message `__. formatted_text (str): Output only. Contains the message ``text`` with markups added to communicate formatting. This field might not @@ -154,8 +154,9 @@ class Message(proto.Message): user `__, the messages can't contain cards. - To learn about cards and how to create them, see `Send card - messages `__. + To learn how to create a message that contains cards, see + `Send a + message `__. `Card builder `__ @@ -213,17 +214,17 @@ class Message(proto.Message): Immutable. Input for creating a message, otherwise output only. The user that can view the message. When set, the message is private and only visible to the specified user - and the Chat app. Link previews and attachments aren't - supported for private messages. + and the Chat app. To include this field in your request, you + must call the Chat API using `app + authentication `__ + and omit the following: - Only Chat apps can send private messages. If your Chat app - `authenticates as a - user `__ - to send a message, the message can't be private and must - omit this field. + - `Attachments `__ + - `Accessory + widgets `__ - For details, see `Send private messages to Google Chat - users `__. + For details, see `Send a message + privately `__. 
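As a minimal sketch of the private-message behavior described above (assuming app authentication and placeholder space and user resource names), a private message might be created like this::

    from google.apps import chat_v1

    client = chat_v1.ChatServiceClient()

    # The message is visible only to the specified user and the Chat app.
    # Attachments and accessory widgets are omitted, as required for
    # private messages.
    request = chat_v1.CreateMessageRequest(
        parent="spaces/AAAAAAAAA",  # placeholder space resource name
        message=chat_v1.Message(
            text="Only you can see this reminder.",
            private_message_viewer=chat_v1.User(name="users/123456789"),  # placeholder user
        ),
    )
    response = client.create_message(request=request)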
deletion_metadata (google.apps.chat_v1.types.DeletionMetadata): Output only. Information about a deleted message. A message is deleted when ``delete_time`` is set. @@ -428,7 +429,7 @@ class Thread(proto.Message): Attributes: name (str): - Output only. Resource name of the thread. + Resource name of the thread. Example: ``spaces/{space}/threads/{thread}`` thread_key (str): diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space.py b/packages/google-apps-chat/google/apps/chat_v1/types/space.py index 75456c5e5e13..694375fc0ebd 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space.py @@ -46,11 +46,26 @@ class Space(proto.Message): r"""A space in Google Chat. Spaces are conversations between two or more users or 1:1 messages between a user and a Chat app. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Resource name of the space. Format: ``spaces/{space}`` + + Where ``{space}`` represents the system-assigned ID for the + space. You can obtain the space ID by calling the + ```spaces.list()`` `__ + method or from the space URL. For example, if the space URL + is + ``https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA``, + the space ID is ``AAAAAAAAA``. type_ (google.apps.chat_v1.types.Space.Type): Output only. Deprecated: Use ``space_type`` instead. The type of a space. @@ -66,12 +81,12 @@ class Space(proto.Message): instead. Whether messages are threaded in this space. display_name (str): The space's display name. Required when `creating a - space `__. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space or updating the ``displayName``, try a - different ``displayName``. An existing space within the - Google Workspace organization might already use this display - name. + space `__ + with a ``spaceType`` of ``SPACE``. If you receive the error + message ``ALREADY_EXISTS`` when creating a space or updating + the ``displayName``, try a different ``displayName``. An + existing space within the Google Workspace organization + might already use this display name. For direct messages, this field might be empty. @@ -86,15 +101,6 @@ class Space(proto.Message): user account). By default, a space created by a consumer account permits any Google Chat user. - - The space is used to [import data to Google Chat] - (https://developers.google.com/chat/api/guides/import-data-overview) - because import mode spaces must only permit members from - the same Google Workspace organization. However, as part - of the `Google Workspace Developer Preview - Program `__, - import mode spaces can permit any Google Chat user so - this field can then be set for import mode spaces. - For existing spaces, this field is output only. space_threading_state (google.apps.chat_v1.types.Space.SpaceThreadingState): Output only. The threading state in the Chat @@ -146,6 +152,21 @@ class Space(proto.Message): space_uri (str): Output only. The URI for a user to access the space. + predefined_permission_settings (google.apps.chat_v1.types.Space.PredefinedPermissionSettings): + Optional. Input only. Predefined space permission settings, + input only when creating a space. 
If the field is not set, a + collaboration space is created. After you create the space, + settings are populated in the ``PermissionSettings`` field. + + This field is a member of `oneof`_ ``space_permission_settings``. + permission_settings (google.apps.chat_v1.types.Space.PermissionSettings): + Optional. Space permission settings for + existing spaces. Input for updating exact space + permission settings, where existing permission + settings are replaced. Output lists current + permission settings. + + This field is a member of `oneof`_ ``space_permission_settings``. """ class Type(proto.Enum): @@ -212,6 +233,27 @@ class SpaceThreadingState(proto.Enum): GROUPED_MESSAGES = 3 UNTHREADED_MESSAGES = 4 + class PredefinedPermissionSettings(proto.Enum): + r"""Predefined permission settings that you can only specify when + creating a named space. More settings might be added in the future. + For details about permission settings for named spaces, see `Learn + about spaces `__. + + Values: + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED (0): + Unspecified. Don't use. + COLLABORATION_SPACE (1): + Setting to make the space a collaboration + space where all members can post messages. + ANNOUNCEMENT_SPACE (2): + Setting to make the space an announcement + space where only space managers can post + messages. + """ + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED = 0 + COLLABORATION_SPACE = 1 + ANNOUNCEMENT_SPACE = 2 + class SpaceDetails(proto.Message): r"""Details about the space including description and rules. @@ -273,14 +315,20 @@ class AccessSettings(proto.Message): Optional. The resource name of the `target audience `__ who can discover the space, join the space, and preview the - messages in the space. For details, see `Make a space - discoverable to a target + messages in the space. If unset, only users or Google Groups + who have been individually invited or added to the space can + access it. For details, see `Make a space discoverable to a + target audience `__. Format: ``audiences/{audience}`` To use the default target audience for the Google Workspace organization, set to ``audiences/default``. + + This field is not populated when using the ``chat.bot`` + scope with `app + authentication `__. """ class AccessState(proto.Enum): @@ -291,12 +339,17 @@ class AccessState(proto.Enum): Access state is unknown or not supported in this API. PRIVATE (1): - Space is discoverable by added or invited - members or groups. + Only users or Google Groups that have been + individually added or invited by other users or + Google Workspace administrators can discover and + access the space. DISCOVERABLE (2): - Space is discoverable by the selected `target - audience `__, - as well as added or invited members or groups. + A space manager has granted a target audience access to the + space. Users or Google Groups that have been individually + added or invited to the space can also discover and access + the space. To learn more, see `Make a space discoverable to + specific + users `__. """ ACCESS_STATE_UNSPECIFIED = 0 PRIVATE = 1 @@ -312,6 +365,125 @@ class AccessState(proto.Enum): number=3, ) + class PermissionSettings(proto.Message): + r"""`Permission + settings `__ that + you can specify when updating an existing named space. + + To set permission settings when creating a space, specify the + ``PredefinedPermissionSettings`` field in your request. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manage_members_and_groups (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing members and groups in a + space. + + This field is a member of `oneof`_ ``_manage_members_and_groups``. + modify_space_details (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for updating space name, avatar, + description and guidelines. + + This field is a member of `oneof`_ ``_modify_space_details``. + toggle_history (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for toggling space history on and + off. + + This field is a member of `oneof`_ ``_toggle_history``. + use_at_mention_all (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for using @all in a space. + + This field is a member of `oneof`_ ``_use_at_mention_all``. + manage_apps (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing apps in a space. + + This field is a member of `oneof`_ ``_manage_apps``. + manage_webhooks (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing webhooks in a space. + + This field is a member of `oneof`_ ``_manage_webhooks``. + post_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Output only. Setting for posting messages in + a space. + + This field is a member of `oneof`_ ``_post_messages``. + reply_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for replying to messages in a space. + + This field is a member of `oneof`_ ``_reply_messages``. + """ + + manage_members_and_groups: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Space.PermissionSetting", + ) + modify_space_details: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Space.PermissionSetting", + ) + toggle_history: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Space.PermissionSetting", + ) + use_at_mention_all: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Space.PermissionSetting", + ) + manage_apps: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="Space.PermissionSetting", + ) + manage_webhooks: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="Space.PermissionSetting", + ) + post_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Space.PermissionSetting", + ) + reply_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="Space.PermissionSetting", + ) + + class PermissionSetting(proto.Message): + r"""Represents a space permission setting. + + Attributes: + managers_allowed (bool): + Whether spaces managers have this permission. + members_allowed (bool): + Whether non-manager members have this + permission. 
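For illustration, a sketch (with placeholder values) of how these permission settings might be supplied when updating an existing space, using one of the ``permission_settings`` field mask paths described for ``UpdateSpaceRequest``; the exact mask casing should be checked against that documentation::

    from google.apps import chat_v1
    from google.protobuf import field_mask_pb2

    client = chat_v1.ChatServiceClient()

    space = chat_v1.Space(
        name="spaces/AAAAAAAAA",  # placeholder space resource name
        permission_settings=chat_v1.Space.PermissionSettings(
            # Only space managers may manage apps in this space.
            manage_apps=chat_v1.Space.PermissionSetting(
                managers_allowed=True,
                members_allowed=False,
            ),
        ),
    )
    request = chat_v1.UpdateSpaceRequest(
        space=space,
        update_mask=field_mask_pb2.FieldMask(
            paths=["permission_settings.manage_apps"]
        ),
    )
    response = client.update_space(request=request)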
+ """ + + managers_allowed: bool = proto.Field( + proto.BOOL, + number=1, + ) + members_allowed: bool = proto.Field( + proto.BOOL, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -389,20 +561,37 @@ class AccessState(proto.Enum): proto.STRING, number=25, ) + predefined_permission_settings: PredefinedPermissionSettings = proto.Field( + proto.ENUM, + number=26, + oneof="space_permission_settings", + enum=PredefinedPermissionSettings, + ) + permission_settings: PermissionSettings = proto.Field( + proto.MESSAGE, + number=27, + oneof="space_permission_settings", + message=PermissionSettings, + ) class CreateSpaceRequest(proto.Message): - r"""A request to create a named space. + r"""A request to create a named space with no members. Attributes: space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try a + different ``displayName``. An existing space within the + Google Workspace organization might already use this display + name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` is + set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -491,8 +680,9 @@ class ListSpacesResponse(proto.Message): Attributes: spaces (MutableSequence[google.apps.chat_v1.types.Space]): - List of spaces in the requested (or first) - page. + List of spaces in the requested (or first) page. Note: The + ``permissionSettings`` field is not returned in the Space + object for list requests. next_page_token (str): You can send a token as ``pageToken`` to retrieve the next page of results. If empty, there are no subsequent pages. @@ -587,67 +777,69 @@ class UpdateSpaceRequest(proto.Message): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display name - of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to update - the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid argument - error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, try - a different ``displayName``. An existing space within the - Google Workspace organization might already use this - display name.) - - - ``space_type`` (Only supports changing a ``GROUP_CHAT`` - space type to ``SPACE``. Include ``display_name`` - together with ``space_type`` in the update mask and - ensure that the specified space has a non-empty display - name and the ``SPACE`` space type. Including the - ``space_type`` mask and the ``SPACE`` type in the - specified space when updating the display name is - optional if the existing space already has the ``SPACE`` - type. Trying to update the space type in other ways - results in an invalid argument error). ``space_type`` is - not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on or - off for the - space `__ - if `the organization allows users to change their history - setting `__. 
- Warning: mutually exclusive with all other field paths.) - ``space_history_state`` is not supported with admin - access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with admin - access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: mutually - exclusive with all other non-permission settings field - paths). ``permission_settings`` is not supported with - admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. Supports + up to 150 characters. + + ``display_name``: Only supports updating the display name + for spaces where ``spaceType`` field is ``SPACE``. If you + receive the error message ``ALREADY_EXISTS``, try a + different value. An existing space within the Google + Workspace organization might already use this display name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` together + with ``space_type`` in the update mask and ensure that the + specified space has a non-empty display name and the + ``SPACE`` space type. Including the ``space_type`` mask and + the ``SPACE`` type in the specified space when updating the + display name is optional if the existing space already has + the ``SPACE`` type. Trying to update the space type in other + ways results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only supported + if history settings are enabled for the Google Workspace + organization. To update the space history state, you must + omit all other field masks in your request. + ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and preview + the messages in named space where ``spaceType`` field is + ``SPACE``. If the existing space has a target audience, you + can remove the audience and restrict space access by + omitting a value for this field mask. To update access + settings for a space, the authenticating user must be a + space manager and omit all other field masks in your + request. You can't update this field if the space is in + `import + mode `__. + To learn more, see `Make a space discoverable to specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the `permission + settings `__ + of a space. When updating permission settings, you can only + specify ``permissionSettings`` field masks; you cannot + update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. 
The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` use_admin_access (bool): When ``true``, the method runs using the user's Google Workspace administrator privileges. diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py index 96591e800225..17bf07ba27d7 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py @@ -492,8 +492,9 @@ class ListSpaceEventsResponse(proto.Message): Attributes: space_events (MutableSequence[google.apps.chat_v1.types.SpaceEvent]): - Results are returned in chronological order - (oldest event first). + Results are returned in chronological order (oldest event + first). Note: The ``permissionSettings`` field is not + returned in the Space object for list requests. next_page_token (str): Continuation token used to fetch more events. If this field is omitted, there are no diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py index 047b4031cee0..b72fb46759d1 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py @@ -39,7 +39,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py index 845ce548ddc0..083a0684ae1e 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py @@ -39,7 +39,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py index 6870b8744a05..8e2b99a025ad 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py @@ -39,7 +39,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = 
chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py index 4d27131a243c..61ad37d3d045 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py @@ -39,7 +39,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py index 14e0944aaaeb..2c2a593b22a2 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py @@ -39,7 +39,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py index a5158f7a9e07..362d50feb376 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py @@ -39,7 +39,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index f0d8a1017646..8f568256e424 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.11" + "version": "0.1.0" }, "snippets": [ { @@ -728,12 +728,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -743,18 +743,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -808,12 +808,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_sync", "segments": [ { 
- "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -823,18 +823,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3749,12 +3749,12 @@ "regionTag": "chat_v1_generated_ChatService_SetUpSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3764,18 +3764,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3825,12 +3825,12 @@ "regionTag": "chat_v1_generated_ChatService_SetUpSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3840,18 +3840,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4417,12 +4417,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4432,18 +4432,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4501,12 +4501,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4516,18 +4516,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 70b8272ff798..893992dd3ab5 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -5744,6 +5744,7 @@ def test_get_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.get_space(request) @@ -6164,6 +6165,7 @@ def test_create_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.create_space(request) @@ -6527,6 +6529,7 @@ def 
test_set_up_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.set_up_space(request) @@ -6810,6 +6813,7 @@ def test_update_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.update_space(request) @@ -7883,6 +7887,7 @@ def test_find_direct_message(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.find_direct_message(request) @@ -13112,6 +13117,20 @@ def test_create_message_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -15259,6 +15278,20 @@ def test_update_message_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -17113,6 +17146,7 @@ def test_get_space_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17441,6 +17475,20 @@ def test_create_space_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -17527,6 +17575,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17843,6 +17892,7 @@ def test_set_up_space_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -18110,6 +18160,20 @@ def test_update_space_rest(request_type): }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -18196,6 +18260,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -19059,6 +19124,7 @@ def test_find_direct_message_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj From bbe5daf0c71a02ae780c7609d433787dec1bc168 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 13:46:55 -0400 Subject: [PATCH 098/108] feat: [google-cloud-deploy] added support for deploy policies (#13114) BEGIN_COMMIT_OVERRIDE feat: added support for deploy policies docs: Minor documentation updates END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. 
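As a rough sketch of the new deploy policy surface added below (project, location, policy ID, rule ID, and time zone are placeholders), a policy with a rollout restriction might be created like this; a real policy would likely also need resource selectors and concrete time windows:

    from google.cloud import deploy_v1

    client = deploy_v1.CloudDeployClient()

    policy = deploy_v1.DeployPolicy(
        rules=[
            deploy_v1.PolicyRule(
                rollout_restriction=deploy_v1.RolloutRestriction(
                    id="no-weekend-rollouts",  # placeholder rule ID
                    time_windows=deploy_v1.TimeWindows(time_zone="America/New_York"),
                )
            )
        ],
    )

    # create_deploy_policy returns a long-running operation.
    operation = client.create_deploy_policy(
        parent="projects/my-project/locations/us-central1",
        deploy_policy=policy,
        deploy_policy_id="no-weekend-rollouts",
    )
    response = operation.result()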
docs: Minor documentation updates PiperOrigin-RevId: 679600689 Source-Link: https://github.com/googleapis/googleapis/commit/bd4f3686c1831554b71129898e20f2cdb7aeac49 Source-Link: https://github.com/googleapis/googleapis-gen/commit/12a2d22bf1bda85ba9bed1fa60484306a2e55781 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRlcGxveS8uT3dsQm90LnlhbWwiLCJoIjoiMTJhMmQyMmJmMWJkYTg1YmE5YmVkMWZhNjA0ODQzMDZhMmU1NTc4MSJ9 --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: ohmayr --- .../google/cloud/deploy/__init__.py | 36 + .../google/cloud/deploy_v1/__init__.py | 34 + .../cloud/deploy_v1/gapic_metadata.json | 75 + .../services/cloud_deploy/async_client.py | 645 + .../deploy_v1/services/cloud_deploy/client.py | 650 + .../deploy_v1/services/cloud_deploy/pagers.py | 152 + .../services/cloud_deploy/transports/base.py | 91 + .../services/cloud_deploy/transports/grpc.py | 134 + .../cloud_deploy/transports/grpc_asyncio.py | 185 + .../services/cloud_deploy/transports/rest.py | 734 +- .../google/cloud/deploy_v1/types/__init__.py | 34 + .../cloud/deploy_v1/types/cloud_deploy.py | 1018 +- .../google/cloud/deploy_v1/types/log_enums.py | 3 + ...cloud_deploy_create_deploy_policy_async.py | 62 + ..._cloud_deploy_create_deploy_policy_sync.py | 62 + ...cloud_deploy_delete_deploy_policy_async.py | 56 + ..._cloud_deploy_delete_deploy_policy_sync.py | 56 + ...ed_cloud_deploy_get_deploy_policy_async.py | 52 + ...ted_cloud_deploy_get_deploy_policy_sync.py | 52 + ...cloud_deploy_list_deploy_policies_async.py | 53 + ..._cloud_deploy_list_deploy_policies_sync.py | 53 + ...cloud_deploy_update_deploy_policy_async.py | 60 + ..._cloud_deploy_update_deploy_policy_sync.py | 60 + ...ippet_metadata_google.cloud.deploy.v1.json | 1079 +- .../scripts/fixup_deploy_v1_keywords.py | 23 +- .../unit/gapic/deploy_v1/test_cloud_deploy.py | 14708 ++++++++++------ 26 files changed, 14650 insertions(+), 5517 deletions(-) create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py create mode 100644 packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py diff --git a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py index 6a2f11ab9889..4b051017ed31 100644 --- 
a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py @@ -61,6 +61,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -74,20 +75,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -106,6 +112,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -116,10 +124,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -144,6 +156,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -159,12 +172,15 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from google.cloud.deploy_v1.types.customtargettype_notification_payload import ( CustomTargetTypeNotificationEvent, @@ -172,6 +188,9 @@ from google.cloud.deploy_v1.types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from google.cloud.deploy_v1.types.deploypolicy_evaluation_payload import ( + DeployPolicyEvaluationEvent, +) from google.cloud.deploy_v1.types.deploypolicy_notification_payload import ( DeployPolicyNotificationEvent, ) @@ -231,6 +250,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -244,20 +264,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -276,6 +301,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -286,10 +313,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + 
"PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -313,6 +344,7 @@ "RollbackTargetRequest", "RollbackTargetResponse", "Rollout", + "RolloutRestriction", "RuntimeConfig", "SerialPipeline", "SkaffoldModules", @@ -327,17 +359,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", "BackoffMode", "RepairState", "SkaffoldSupportState", "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py index 80367e9c4284..c9c4ecd71f25 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py @@ -58,6 +58,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -71,20 +72,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -103,6 +109,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -113,10 +121,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -141,6 +153,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -156,12 +169,15 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from .types.customtargettype_notification_payload import ( CustomTargetTypeNotificationEvent, @@ -169,6 +185,7 @@ from .types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from .types.deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent from .types.deploypolicy_notification_payload import DeployPolicyNotificationEvent from .types.jobrun_notification_payload import JobRunNotificationEvent from .types.log_enums import Type @@ -219,6 +236,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -233,15 +251,20 @@ 
"DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeliveryPipelineNotificationEvent", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeployParameters", + "DeployPolicy", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", + "DeployPolicyResourceSelector", "DeploymentJobs", "ExecutionConfig", "GetAutomationRequest", @@ -249,6 +272,7 @@ "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -268,6 +292,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -278,10 +304,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -309,6 +339,7 @@ "RollbackTargetResponse", "Rollout", "RolloutNotificationEvent", + "RolloutRestriction", "RolloutUpdateEvent", "RuntimeConfig", "SerialPipeline", @@ -326,11 +357,14 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "Type", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", ) diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json index a8eee6244a5f..32200eac6f82 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json @@ -50,6 +50,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -80,6 +85,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -110,6 +120,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -155,6 +170,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -205,6 +225,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" @@ -255,6 +280,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -285,6 +315,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -315,6 +350,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -360,6 +400,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + 
"list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -410,6 +455,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" @@ -460,6 +510,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -490,6 +545,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -520,6 +580,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -565,6 +630,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -615,6 +685,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py index 72c3f7a4098b..d4fb28cb770e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py @@ -95,6 +95,8 @@ class CloudDeployAsyncClient: parse_delivery_pipeline_path = staticmethod( CloudDeployClient.parse_delivery_pipeline_path ) + deploy_policy_path = staticmethod(CloudDeployClient.deploy_policy_path) + parse_deploy_policy_path = staticmethod(CloudDeployClient.parse_deploy_policy_path) job_path = staticmethod(CloudDeployClient.job_path) parse_job_path = staticmethod(CloudDeployClient.parse_job_path) job_run_path = staticmethod(CloudDeployClient.job_run_path) @@ -2826,6 +2828,649 @@ async def sample_abandon_release(): # Done; return the response. return response + async def create_deploy_policy( + self, + request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + deploy_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new DeployPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]]): + The request object. The request object for ``CreateDeployPolicy``. + parent (:class:`str`): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (:class:`str`): + Required. ID of the ``DeployPolicy``. + This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]]): + The request object. The request object for ``DeleteDeployPolicy``. + name (:class:`str`): + Required. The name of the ``DeployPolicy`` to delete. + The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesAsyncPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]]): + The request object. The request object for ``ListDeployPolicies``. + parent (:class:`str`): + Required. The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_deploy_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeployPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]]): + The request object. The request object for ``GetDeployPolicy`` + name (:class:`str`): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def approve_rollout( self, request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index af6eb35e86ed..8bb6a9e6f9b9 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -349,6 +349,28 @@ def parse_delivery_pipeline_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def deploy_policy_path( + project: str, + location: str, + deploy_policy: str, + ) -> str: + """Returns a fully-qualified deploy_policy string.""" + return "projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format( + project=project, + location=location, + deploy_policy=deploy_policy, + ) + + @staticmethod + def parse_deploy_policy_path(path: str) -> Dict[str, str]: + """Parses a deploy_policy path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deployPolicies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def job_path( project: str, @@ -3507,6 +3529,634 @@ def sample_abandon_release(): # Done; return the response. return response + def create_deploy_policy( + self, + request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + deploy_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new DeployPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]): + The request object. The request object for ``CreateDeployPolicy``. + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. + This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]): + The request object. The request object for ``DeleteDeployPolicy``. 
+ name (str): + Required. The name of the ``DeployPolicy`` to delete. + The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]): + The request object. The request object for ``ListDeployPolicies``. + parent (str): + Required. The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deploy_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeployPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]): + The request object. The request object for ``GetDeployPolicy`` + name (str): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def approve_rollout( self, request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py index 508ba8ca2d07..29ee6b427605 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py @@ -649,6 +649,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListDeployPoliciesPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deploy_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloud_deploy.ListDeployPoliciesResponse], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. + response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[cloud_deploy.DeployPolicy]: + for page in self.pages: + yield from page.deploy_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeployPoliciesAsyncPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deploy_policies`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloud_deploy.ListDeployPoliciesResponse]], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. + response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[cloud_deploy.DeployPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.deploy_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListRolloutsPager: """A pager for iterating through ``list_rollouts`` requests. 
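
Taken together, the new RPCs form a simple lifecycle. The sketch below is not part of the generated code; it only illustrates the call pattern with the flattened arguments defined above. ``my-project``, ``us-central1`` and ``my-policy`` are placeholder values, and the required ``DeployPolicy`` fields (such as its rules) are left to be populated as in the generated samples earlier in this change.

.. code-block:: python

    from google.cloud import deploy_v1


    def deploy_policy_lifecycle():
        client = deploy_v1.CloudDeployClient()
        parent = "projects/my-project/locations/us-central1"  # placeholder

        # Create: returns a long-running operation whose result() is the policy.
        deploy_policy = deploy_v1.DeployPolicy()  # populate required fields (e.g. rules)
        policy = client.create_deploy_policy(
            parent=parent,
            deploy_policy=deploy_policy,
            deploy_policy_id="my-policy",  # placeholder
        ).result()

        # Read a single policy back, then list every policy in the location;
        # the pager resolves additional pages transparently.
        fetched = client.get_deploy_policy(name=policy.name)
        for p in client.list_deploy_policies(parent=parent):
            print(p.name)

        # Delete is also a long-running operation; its result() is Empty.
        client.delete_deploy_policy(name=fetched.name).result()

The ``CloudDeployAsyncClient`` methods added above mirror this shape, returning awaitable operations and the async pager.
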
diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py index d3042176dd81..d52eff30b350 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py @@ -304,6 +304,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method.wrap_method( + self.list_deploy_policies, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method.wrap_method( + self.get_deploy_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method.wrap_method( self.approve_rollout, default_timeout=60.0, @@ -699,6 +742,54 @@ def abandon_release( ]: raise NotImplementedError() + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Union[ + cloud_deploy.ListDeployPoliciesResponse, + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], + Union[cloud_deploy.DeployPolicy, Awaitable[cloud_deploy.DeployPolicy]], + ]: + raise NotImplementedError() + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py index 4ce9240634f7..4590ce080f41 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py @@ -808,6 +808,140 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> 
Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + ~.ListDeployPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + ~.DeployPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py index 62883f74e557..db641300032a 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py @@ -834,6 +834,148 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + Awaitable[~.ListDeployPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], Awaitable[cloud_deploy.DeployPolicy] + ]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + Awaitable[~.DeployPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, @@ -1577,6 +1719,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method_async.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method_async.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method_async.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method_async.wrap_method( + self.list_deploy_policies, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method_async.wrap_method( + self.get_deploy_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method_async.wrap_method( self.approve_rollout, default_timeout=60.0, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py index a96dad3bb982..bd704448565e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -138,6 +138,14 @@ def post_create_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_create_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_release(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -186,6 +194,14 @@ def post_delete_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -234,6 +250,14 @@ def post_get_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_get_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def 
pre_get_job_run(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -306,6 +330,14 @@ def post_list_delivery_pipelines(self, response): logging.log(f"Received response: {response}") return response + def pre_list_deploy_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deploy_policies(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_job_runs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -386,6 +418,14 @@ def post_update_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_update_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -584,6 +624,29 @@ def post_create_delivery_pipeline( """ return response + def pre_create_deploy_policy( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_create_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_create_release( self, request: cloud_deploy.CreateReleaseRequest, @@ -722,6 +785,29 @@ def post_delete_delivery_pipeline( """ return response + def pre_delete_deploy_policy( + self, + request: cloud_deploy.DeleteDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.DeleteDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_delete_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_delete_target( self, request: cloud_deploy.DeleteTargetRequest, @@ -858,6 +944,29 @@ def post_get_delivery_pipeline( """ return response + def pre_get_deploy_policy( + self, + request: cloud_deploy.GetDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_get_deploy_policy( + self, response: cloud_deploy.DeployPolicy + ) -> cloud_deploy.DeployPolicy: + """Post-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_get_job_run( self, request: cloud_deploy.GetJobRunRequest, @@ -1057,6 +1166,29 @@ def post_list_delivery_pipelines( """ return response + def pre_list_deploy_policies( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListDeployPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_deploy_policies( + self, response: cloud_deploy.ListDeployPoliciesResponse + ) -> cloud_deploy.ListDeployPoliciesResponse: + """Post-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_list_job_runs( self, request: cloud_deploy.ListJobRunsRequest, @@ -1285,6 +1417,29 @@ def post_update_delivery_pipeline( """ return response + def pre_update_deploy_policy( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.UpdateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_update_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_update_target( self, request: cloud_deploy.UpdateTargetRequest, @@ -2413,6 +2568,103 @@ def __call__( resp = self._interceptor.post_create_delivery_pipeline(resp) return resp + class _CreateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("CreateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "deployPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.CreateDeployPolicyRequest): + The request object. The request object for ``CreateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_create_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.CreateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deploy_policy(resp) + return resp + class _CreateRelease(CloudDeployRestStub): def __hash__(self): return hash("CreateRelease") @@ -2963,9 +3215,9 @@ def __call__( resp = self._interceptor.post_delete_delivery_pipeline(resp) return resp - class _DeleteTarget(CloudDeployRestStub): + class _DeleteDeployPolicy(CloudDeployRestStub): def __hash__(self): - return hash("DeleteTarget") + return hash("DeleteDeployPolicy") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2979,17 +3231,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.DeleteTargetRequest, + request: cloud_deploy.DeleteDeployPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete target method over HTTP. + r"""Call the delete deploy policy method over HTTP. Args: - request (~.cloud_deploy.DeleteTargetRequest): - The request object. The request object for ``DeleteTarget``. + request (~.cloud_deploy.DeleteDeployPolicyRequest): + The request object. The request object for ``DeleteDeployPolicy``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3007,11 +3259,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/targets/*}", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", }, ] - request, metadata = self._interceptor.pre_delete_target(request, metadata) - pb_request = cloud_deploy.DeleteTargetRequest.pb(request) + request, metadata = self._interceptor.pre_delete_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.DeleteDeployPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3046,12 +3300,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_target(resp) + resp = self._interceptor.post_delete_deploy_policy(resp) return resp - class _GetAutomation(CloudDeployRestStub): + class _DeleteTarget(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomation") + return hash("DeleteTarget") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3065,17 +3319,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRequest, + request: cloud_deploy.DeleteTargetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_deploy.Automation: - r"""Call the get automation method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete target method over HTTP. Args: - request (~.cloud_deploy.GetAutomationRequest): - The request object. The request object for ``GetAutomation`` + request (~.cloud_deploy.DeleteTargetRequest): + The request object. The request object for ``DeleteTarget``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3083,26 +3337,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.cloud_deploy.Automation: - An ``Automation`` resource in the Cloud Deploy API. - - An ``Automation`` enables the automation of manually - driven actions for a Delivery Pipeline, which includes - Release promotion among Targets, Rollout repair and - Rollout deployment strategy advancement. The intention - of Automation is to reduce manual intervention in the - continuous delivery process. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/targets/*}", }, ] - request, metadata = self._interceptor.pre_get_automation(request, metadata) - pb_request = cloud_deploy.GetAutomationRequest.pb(request) + request, metadata = self._interceptor.pre_delete_target(request, metadata) + pb_request = cloud_deploy.DeleteTargetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3135,16 +3384,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = cloud_deploy.Automation() - pb_resp = cloud_deploy.Automation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_automation(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target(resp) return resp - class _GetAutomationRun(CloudDeployRestStub): + class _GetAutomation(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomationRun") + return hash("GetAutomation") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3158,7 +3405,100 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRunRequest, + request: cloud_deploy.GetAutomationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Automation: + r"""Call the get automation method over HTTP. + + Args: + request (~.cloud_deploy.GetAutomationRequest): + The request object. The request object for ``GetAutomation`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Automation: + An ``Automation`` resource in the Cloud Deploy API. + + An ``Automation`` enables the automation of manually + driven actions for a Delivery Pipeline, which includes + Release promotion among Targets, Rollout repair and + Rollout deployment strategy advancement. The intention + of Automation is to reduce manual intervention in the + continuous delivery process. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_automation(request, metadata) + pb_request = cloud_deploy.GetAutomationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Automation() + pb_resp = cloud_deploy.Automation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_automation(resp) + return resp + + class _GetAutomationRun(CloudDeployRestStub): + def __hash__(self): + return hash("GetAutomationRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetAutomationRunRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, @@ -3502,6 +3842,97 @@ def __call__( resp = self._interceptor.post_get_delivery_pipeline(resp) return resp + class _GetDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("GetDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Call the get deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.GetDeployPolicyRequest): + The request object. The request object for ``GetDeployPolicy`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.DeployPolicy: + A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.GetDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.DeployPolicy() + pb_resp = cloud_deploy.DeployPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deploy_policy(resp) + return resp + class _GetJobRun(CloudDeployRestStub): def __hash__(self): return hash("GetJobRun") @@ -4300,6 +4731,93 @@ def __call__( resp = self._interceptor.post_list_delivery_pipelines(resp) return resp + class _ListDeployPolicies(CloudDeployRestStub): + def __hash__(self): + return hash("ListDeployPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListDeployPoliciesResponse: + r"""Call the list deploy policies method over HTTP. + + Args: + request (~.cloud_deploy.ListDeployPoliciesRequest): + The request object. The request object for ``ListDeployPolicies``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListDeployPoliciesResponse: + The response object from ``ListDeployPolicies``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + }, + ] + request, metadata = self._interceptor.pre_list_deploy_policies( + request, metadata + ) + pb_request = cloud_deploy.ListDeployPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListDeployPoliciesResponse() + pb_resp = cloud_deploy.ListDeployPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deploy_policies(resp) + return resp + class _ListJobRuns(CloudDeployRestStub): def __hash__(self): return hash("ListJobRuns") @@ -5216,6 +5734,103 @@ def __call__( resp = self._interceptor.post_update_delivery_pipeline(resp) return resp + class _UpdateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("UpdateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.UpdateDeployPolicyRequest): + The request object. The request object for ``UpdateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_update_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.UpdateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deploy_policy(resp) + return resp + class _UpdateTarget(CloudDeployRestStub): def __hash__(self): return hash("UpdateTarget") @@ -5390,6 +6005,14 @@ def create_delivery_pipeline( # In C++ this would require a dynamic_cast return self._CreateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def create_deploy_policy( + self, + ) -> Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def create_release( self, @@ -5442,6 +6065,14 @@ def delete_delivery_pipeline( # In C++ this would require a dynamic_cast return self._DeleteDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def delete_target( self, @@ -5494,6 +6125,14 @@ def get_delivery_pipeline( # In C++ this would require a dynamic_cast return self._GetDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def get_job_run( self, @@ -5577,6 +6216,17 @@ def list_delivery_pipelines( # In C++ this would require a dynamic_cast return self._ListDeliveryPipelines(self._session, self._host, self._interceptor) # type: ignore + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeployPolicies(self._session, self._host, self._interceptor) # type: ignore + @property def list_job_runs( self, @@ -5669,6 +6319,14 @@ def update_delivery_pipeline( # In C++ this would require a dynamic_cast return self._UpdateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def update_target( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py index 215038669273..7017500e27c7 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py @@ -52,6 +52,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -65,20 +66,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -97,6 +103,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -107,10 +115,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -135,6 +147,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -150,15 +163,19 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from .customtargettype_notification_payload import CustomTargetTypeNotificationEvent from 
.deliverypipeline_notification_payload import DeliveryPipelineNotificationEvent +from .deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent from .deploypolicy_notification_payload import DeployPolicyNotificationEvent from .jobrun_notification_payload import JobRunNotificationEvent from .log_enums import Type @@ -206,6 +223,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -219,20 +237,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -251,6 +274,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -261,10 +286,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -288,6 +317,7 @@ "RollbackTargetRequest", "RollbackTargetResponse", "Rollout", + "RolloutRestriction", "RuntimeConfig", "SerialPipeline", "SkaffoldModules", @@ -302,17 +332,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", "BackoffMode", "RepairState", "SkaffoldSupportState", "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index c7d9e95ecf4f..809363dd9595 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -21,6 +21,8 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -80,8 +82,24 @@ "CreateCustomTargetTypeRequest", "UpdateCustomTargetTypeRequest", "DeleteCustomTargetTypeRequest", + "DeployPolicy", + "DeployPolicyResourceSelector", + "DeliveryPipelineAttribute", "TargetAttribute", + "PolicyRule", + "RolloutRestriction", + "TimeWindows", + "OneTimeWindow", + "WeeklyWindow", + "PolicyViolation", + "PolicyViolationDetails", "Release", + "CreateDeployPolicyRequest", + "UpdateDeployPolicyRequest", + 
"DeleteDeployPolicyRequest", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", + "GetDeployPolicyRequest", "BuildArtifact", "TargetArtifact", "DeployArtifact", @@ -1387,6 +1405,9 @@ class RollbackTargetRequest(proto.Message): validate_only (bool): Optional. If set to true, the request is validated and the user is provided with a ``RollbackTargetResponse``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deploy_policy}``. """ name: str = proto.Field( @@ -1418,6 +1439,10 @@ class RollbackTargetRequest(proto.Message): proto.BOOL, number=7, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) class RollbackTargetResponse(proto.Message): @@ -2398,7 +2423,7 @@ class SkaffoldGCSSource(proto.Message): Attributes: source (str): Required. Cloud Storage source paths to copy recursively. - For example, providing "gs://my-bucket/dir/configs/*" will + For example, providing ``"gs://my-bucket/dir/configs/*"`` will result in Skaffold copying all files within the "dir/configs" directory in the bucket "my-bucket". path (str): @@ -2761,8 +2786,212 @@ class DeleteCustomTargetTypeRequest(proto.Message): ) +class DeployPolicy(proto.Message): + r"""A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation-driven actions + within a Delivery Pipeline or Target. + + Attributes: + name (str): + Output only. Name of the ``DeployPolicy``. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. + The ``deployPolicy`` component must match + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` + uid (str): + Output only. Unique identifier of the ``DeployPolicy``. + description (str): + Description of the ``DeployPolicy``. Max length is 255 + characters. + annotations (MutableMapping[str, str]): + User annotations. These attributes can only be set and used + by the user, and not by Cloud Deploy. Annotations must meet + the following constraints: + + - Annotations are key/value pairs. + - Valid annotation keys have two segments: an optional + prefix and name, separated by a slash (``/``). + - The name segment is required and must be 63 characters or + less, beginning and ending with an alphanumeric character + (``[a-z0-9A-Z]``) with dashes (``-``), underscores + (``_``), dots (``.``), and alphanumerics between. + - The prefix is optional. If specified, the prefix must be + a DNS subdomain: a series of DNS labels separated by + dots(\ ``.``), not longer than 253 characters in total, + followed by a slash (``/``). + + See + https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set + for more details. + labels (MutableMapping[str, str]): + Labels are attributes that can be set and used by both the + user and by Cloud Deploy. Labels must meet the following + constraints: + + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. + - All characters must use UTF-8 encoding, and international + characters are allowed. + - Keys must start with a lowercase letter or international + character. + - Each resource is limited to a maximum of 64 labels. + + Both keys and values are additionally constrained to be <= + 128 bytes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the deploy policy + was created. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Most recent time at which the + deploy policy was updated. + suspended (bool): + When suspended, the policy will not prevent + actions from occurring, even if the action + violates the policy. + selectors (MutableSequence[google.cloud.deploy_v1.types.DeployPolicyResourceSelector]): + Required. Selected resources to which the + policy will be applied. At least one selector is + required. If one selector matches the resource + the policy applies. For example, if there are + two selectors and the action being attempted + matches one of them, the policy will apply to + that action. + rules (MutableSequence[google.cloud.deploy_v1.types.PolicyRule]): + Required. Rules to apply. At least one rule + must be present. + etag (str): + The weak etag of the ``Automation`` resource. This checksum + is computed by the server based on the value of other + fields, and may be sent on update and delete requests to + ensure the client has an up-to-date value before proceeding. + """ + + class Invoker(proto.Enum): + r"""What invoked the action. Filters enforcing the policy + depending on what invoked the action. + + Values: + INVOKER_UNSPECIFIED (0): + Unspecified. + USER (1): + The action is user-driven. For example, + creating a rollout manually via a gcloud create + command. + DEPLOY_AUTOMATION (2): + Automated action by Cloud Deploy. + """ + INVOKER_UNSPECIFIED = 0 + USER = 1 + DEPLOY_AUTOMATION = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + suspended: bool = proto.Field( + proto.BOOL, + number=8, + ) + selectors: MutableSequence["DeployPolicyResourceSelector"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="DeployPolicyResourceSelector", + ) + rules: MutableSequence["PolicyRule"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="PolicyRule", + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + + +class DeployPolicyResourceSelector(proto.Message): + r"""Contains information on the resources to select for a deploy + policy. Attributes provided must all match the resource in order + for policy restrictions to apply. For example, if delivery + pipelines attributes given are an id "prod" and labels "foo: + bar", a delivery pipeline resource must match both that id and + have that label in order to be subject to the policy. + + Attributes: + delivery_pipeline (google.cloud.deploy_v1.types.DeliveryPipelineAttribute): + Optional. Contains attributes about a + delivery pipeline. + target (google.cloud.deploy_v1.types.TargetAttribute): + Optional. Contains attributes about a target. 
+ """ + + delivery_pipeline: "DeliveryPipelineAttribute" = proto.Field( + proto.MESSAGE, + number=1, + message="DeliveryPipelineAttribute", + ) + target: "TargetAttribute" = proto.Field( + proto.MESSAGE, + number=2, + message="TargetAttribute", + ) + + +class DeliveryPipelineAttribute(proto.Message): + r"""Contains criteria for selecting DeliveryPipelines. + + Attributes: + id (str): + ID of the ``DeliveryPipeline``. The value of this field + could be one of the following: + + - The last segment of a pipeline name + - "*", all delivery pipelines in a location + labels (MutableMapping[str, str]): + DeliveryPipeline labels. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + class TargetAttribute(proto.Message): - r"""Contains criteria for selecting Targets. + r"""Contains criteria for selecting Targets. This could be used + to select targets for a Deploy Policy or for an Automation. Attributes: id (str): @@ -2786,6 +3015,262 @@ class TargetAttribute(proto.Message): ) +class PolicyRule(proto.Message): + r"""Deploy Policy rule. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rollout_restriction (google.cloud.deploy_v1.types.RolloutRestriction): + Rollout restrictions. + + This field is a member of `oneof`_ ``rule``. + """ + + rollout_restriction: "RolloutRestriction" = proto.Field( + proto.MESSAGE, + number=2, + oneof="rule", + message="RolloutRestriction", + ) + + +class RolloutRestriction(proto.Message): + r"""Rollout restrictions. + + Attributes: + id (str): + Required. Restriction rule ID. Required and must be unique + within a DeployPolicy. The format is + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``. + invokers (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy.Invoker]): + Optional. What invoked the action. If left + empty, all invoker types will be restricted. + actions (MutableSequence[google.cloud.deploy_v1.types.RolloutRestriction.RolloutActions]): + Optional. Rollout actions to be restricted as + part of the policy. If left empty, all actions + will be restricted. + time_windows (google.cloud.deploy_v1.types.TimeWindows): + Required. Time window within which actions + are restricted. + """ + + class RolloutActions(proto.Enum): + r"""Rollout actions to be restricted as part of the policy. + + Values: + ROLLOUT_ACTIONS_UNSPECIFIED (0): + Unspecified. + ADVANCE (1): + Advance the rollout to the next phase. + APPROVE (2): + Approve the rollout. + CANCEL (3): + Cancel the rollout. + CREATE (4): + Create a rollout. + IGNORE_JOB (5): + Ignore a job result on the rollout. + RETRY_JOB (6): + Retry a job for a rollout. + ROLLBACK (7): + Rollback a rollout. + TERMINATE_JOBRUN (8): + Terminate a jobrun. + """ + ROLLOUT_ACTIONS_UNSPECIFIED = 0 + ADVANCE = 1 + APPROVE = 2 + CANCEL = 3 + CREATE = 4 + IGNORE_JOB = 5 + RETRY_JOB = 6 + ROLLBACK = 7 + TERMINATE_JOBRUN = 8 + + id: str = proto.Field( + proto.STRING, + number=1, + ) + invokers: MutableSequence["DeployPolicy.Invoker"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="DeployPolicy.Invoker", + ) + actions: MutableSequence[RolloutActions] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=RolloutActions, + ) + time_windows: "TimeWindows" = proto.Field( + proto.MESSAGE, + number=4, + message="TimeWindows", + ) + + +class TimeWindows(proto.Message): + r"""Time windows within which actions are restricted. 
See the + `documentation `__ + for more information on how to configure dates/times. + + Attributes: + time_zone (str): + Required. The time zone in IANA format `IANA Time Zone + Database `__ (e.g. + America/New_York). + one_time_windows (MutableSequence[google.cloud.deploy_v1.types.OneTimeWindow]): + Optional. One-time windows within which + actions are restricted. + weekly_windows (MutableSequence[google.cloud.deploy_v1.types.WeeklyWindow]): + Optional. Recurring weekly windows within + which actions are restricted. + """ + + time_zone: str = proto.Field( + proto.STRING, + number=1, + ) + one_time_windows: MutableSequence["OneTimeWindow"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OneTimeWindow", + ) + weekly_windows: MutableSequence["WeeklyWindow"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="WeeklyWindow", + ) + + +class OneTimeWindow(proto.Message): + r"""One-time window within which actions are restricted. For + example, blocking actions over New Year's Eve from December 31st + at 5pm to January 1st at 9am. + + Attributes: + start_date (google.type.date_pb2.Date): + Required. Start date. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time (inclusive). Use 00:00 + for the beginning of the day. + end_date (google.type.date_pb2.Date): + Required. End date. + end_time (google.type.timeofday_pb2.TimeOfDay): + Required. End time (exclusive). You may use + 24:00 for the end of the day. + """ + + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=3, + message=date_pb2.Date, + ) + end_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + + +class WeeklyWindow(proto.Message): + r"""Weekly windows. For example, blocking actions every Saturday + and Sunday. Another example would be blocking actions every + weekday from 5pm to midnight. + + Attributes: + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Days of week. If left empty, all + days of the week will be included. + start_time (google.type.timeofday_pb2.TimeOfDay): + Optional. Start time (inclusive). Use 00:00 for the + beginning of the day. If you specify start_time you must + also specify end_time. If left empty, this will block for + the entire day for the days specified in days_of_week. + end_time (google.type.timeofday_pb2.TimeOfDay): + Optional. End time (exclusive). Use 24:00 to indicate + midnight. If you specify end_time you must also specify + start_time. If left empty, this will block for the entire + day for the days specified in days_of_week. + """ + + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + end_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + + +class PolicyViolation(proto.Message): + r"""Returned from an action if one or more policies were + violated, and therefore the action was prevented. Contains + information about what policies were violated and why. 
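
The restriction messages compose as PolicyRule -> RolloutRestriction -> TimeWindows -> OneTimeWindow/WeeklyWindow, with dates, weekdays, and times taken from the google.type well-known types imported at the top of this module. A small construction sketch using only fields defined in this hunk; the concrete IDs, dates, and time zone are illustrative:

    from google.cloud.deploy_v1.types import cloud_deploy
    from google.type import date_pb2, dayofweek_pb2, timeofday_pb2

    # Block every Saturday and Sunday, plus New Year's Eve from Dec 31 5pm
    # to Jan 1 9am, mirroring the examples in the docstrings above.
    windows = cloud_deploy.TimeWindows(
        time_zone="America/New_York",
        weekly_windows=[
            cloud_deploy.WeeklyWindow(
                days_of_week=[
                    dayofweek_pb2.DayOfWeek.SATURDAY,
                    dayofweek_pb2.DayOfWeek.SUNDAY,
                ],
                # Omitting start_time/end_time blocks the whole day.
            ),
        ],
        one_time_windows=[
            cloud_deploy.OneTimeWindow(
                start_date=date_pb2.Date(year=2024, month=12, day=31),
                start_time=timeofday_pb2.TimeOfDay(hours=17),
                end_date=date_pb2.Date(year=2025, month=1, day=1),
                end_time=timeofday_pb2.TimeOfDay(hours=9),
            ),
        ],
    )

    rule = cloud_deploy.PolicyRule(
        rollout_restriction=cloud_deploy.RolloutRestriction(
            id="no-weekend-rollouts",
            # Leaving ``invokers`` and ``actions`` empty restricts all
            # invoker types and all rollout actions.
            time_windows=windows,
        ),
    )
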
+ + Attributes: + policy_violation_details (MutableSequence[google.cloud.deploy_v1.types.PolicyViolationDetails]): + Policy violation details. + """ + + policy_violation_details: MutableSequence[ + "PolicyViolationDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PolicyViolationDetails", + ) + + +class PolicyViolationDetails(proto.Message): + r"""Policy violation details. + + Attributes: + policy (str): + Name of the policy that was violated. Policy resource will + be in the format of + ``projects/{project}/locations/{location}/policies/{policy}``. + rule_id (str): + Id of the rule that triggered the policy + violation. + failure_message (str): + User readable message about why the request + violated a policy. This is not intended for + machine parsing. + """ + + policy: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + failure_message: str = proto.Field( + proto.STRING, + number=3, + ) + + class Release(proto.Message): r"""A ``Release`` resource in the Cloud Deploy API. @@ -3084,124 +3569,417 @@ class ReleaseCondition(proto.Message): release's Skaffold version. """ - release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( - proto.MESSAGE, - number=1, - message="Release.ReleaseReadyCondition", - ) - skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( - proto.Field( - proto.MESSAGE, - number=2, - message="Release.SkaffoldSupportedCondition", - ) - ) + release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( + proto.MESSAGE, + number=1, + message="Release.ReleaseReadyCondition", + ) + skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="Release.SkaffoldSupportedCondition", + ) + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + abandoned: bool = proto.Field( + proto.BOOL, + number=23, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + render_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + render_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + skaffold_config_uri: str = proto.Field( + proto.STRING, + number=17, + ) + skaffold_config_path: str = proto.Field( + proto.STRING, + number=9, + ) + build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="BuildArtifact", + ) + delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( + proto.MESSAGE, + number=11, + message="DeliveryPipeline", + ) + target_snapshots: MutableSequence["Target"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="Target", + ) + custom_target_type_snapshots: MutableSequence[ + "CustomTargetType" + ] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message="CustomTargetType", + ) + render_state: RenderState = proto.Field( + proto.ENUM, + number=13, + enum=RenderState, + ) + etag: str = proto.Field( + proto.STRING, + number=16, + ) + skaffold_version: str = proto.Field( + 
proto.STRING, + number=19, + ) + target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=20, + message="TargetArtifact", + ) + target_renders: MutableMapping[str, TargetRender] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=22, + message=TargetRender, + ) + condition: ReleaseCondition = proto.Field( + proto.MESSAGE, + number=24, + message=ReleaseCondition, + ) + deploy_parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=25, + ) + + +class CreateDeployPolicyRequest(proto.Message): + r"""The request object for ``CreateDeployPolicy``. + + Attributes: + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deploy_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="DeployPolicy", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateDeployPolicyRequest(proto.Message): + r"""The request object for ``UpdateDeployPolicy``. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` resource. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it's in the mask. If the user doesn't provide a mask then + all fields are overwritten. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, updating a ``DeployPolicy`` that + does not exist will result in the creation of a new + ``DeployPolicy``. + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ - name: str = proto.Field( - proto.STRING, + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, number=1, + message=field_mask_pb2.FieldMask, ) - uid: str = proto.Field( - proto.STRING, + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, number=2, + message="DeployPolicy", ) - description: str = proto.Field( + request_id: str = proto.Field( proto.STRING, number=3, ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, + allow_missing: bool = proto.Field( + proto.BOOL, number=4, ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - abandoned: bool = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, - number=23, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - render_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - render_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, + number=5, ) - skaffold_config_uri: str = proto.Field( + + +class DeleteDeployPolicyRequest(proto.Message): + r"""The request object for ``DeleteDeployPolicy``. + + Attributes: + name (str): + Required. The name of the ``DeployPolicy`` to delete. The + format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, then deleting an already deleted + or non-existing ``DeployPolicy`` will succeed. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not actually post it. + etag (str): + Optional. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. 
+ """ + + name: str = proto.Field( proto.STRING, - number=17, + number=1, ) - skaffold_config_path: str = proto.Field( + request_id: str = proto.Field( proto.STRING, - number=9, + number=2, ) - build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="BuildArtifact", + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, ) - delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( - proto.MESSAGE, - number=11, - message="DeliveryPipeline", + validate_only: bool = proto.Field( + proto.BOOL, + number=4, ) - target_snapshots: MutableSequence["Target"] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message="Target", + etag: str = proto.Field( + proto.STRING, + number=5, ) - custom_target_type_snapshots: MutableSequence[ - "CustomTargetType" - ] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message="CustomTargetType", + + +class ListDeployPoliciesRequest(proto.Message): + r"""The request object for ``ListDeployPolicies``. + + Attributes: + parent (str): + Required. The parent, which owns this collection of deploy + policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + page_size (int): + The maximum number of deploy policies to + return. The service may return fewer than this + value. If unspecified, at most 50 deploy + policies will be returned. The maximum value is + 1000; values above 1000 will be set to 1000. + page_token (str): + A page token, received from a previous + ``ListDeployPolicies`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other provided parameters match the + call that provided the page token. + filter (str): + Filter deploy policies to be returned. See + https://google.aip.dev/160 for more details. All + fields can be used in the filter. + order_by (str): + Field to sort by. See + https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, ) - render_state: RenderState = proto.Field( - proto.ENUM, - number=13, - enum=RenderState, + page_size: int = proto.Field( + proto.INT32, + number=2, ) - etag: str = proto.Field( + page_token: str = proto.Field( proto.STRING, - number=16, + number=3, ) - skaffold_version: str = proto.Field( + filter: str = proto.Field( proto.STRING, - number=19, + number=4, ) - target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + order_by: str = proto.Field( proto.STRING, - proto.MESSAGE, - number=20, - message="TargetArtifact", + number=5, ) - target_renders: MutableMapping[str, TargetRender] = proto.MapField( - proto.STRING, + + +class ListDeployPoliciesResponse(proto.Message): + r"""The response object from ``ListDeployPolicies``. + + Attributes: + deploy_policies (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy]): + The ``DeployPolicy`` objects. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + deploy_policies: MutableSequence["DeployPolicy"] = proto.RepeatedField( proto.MESSAGE, - number=22, - message=TargetRender, + number=1, + message="DeployPolicy", ) - condition: ReleaseCondition = proto.Field( - proto.MESSAGE, - number=24, - message=ReleaseCondition, + next_page_token: str = proto.Field( + proto.STRING, + number=2, ) - deploy_parameters: MutableMapping[str, str] = proto.MapField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, + number=3, + ) + + +class GetDeployPolicyRequest(proto.Message): + r"""The request object for ``GetDeployPolicy`` + + Attributes: + name (str): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + """ + + name: str = proto.Field( proto.STRING, - number=25, + number=1, ) @@ -3504,6 +4282,9 @@ class CreateReleaseRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ parent: str = proto.Field( @@ -3527,6 +4308,10 @@ class CreateReleaseRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) class Rollout(proto.Message): @@ -4457,6 +5242,9 @@ class CreateRolloutRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. starting_phase_id (str): Optional. The starting phase ID for the ``Rollout``. If empty the ``Rollout`` will start at the first phase. @@ -4483,6 +5271,10 @@ class CreateRolloutRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) starting_phase_id: str = proto.Field( proto.STRING, number=7, @@ -4560,6 +5352,9 @@ class ApproveRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. approved (bool): Required. True = approve; false = reject + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( @@ -4570,6 +5365,10 @@ class ApproveRolloutRequest(proto.Message): proto.BOOL, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class ApproveRolloutResponse(proto.Message): @@ -4585,6 +5384,9 @@ class AdvanceRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. phase_id (str): Required. The phase ID to advance the ``Rollout`` to. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. 
""" name: str = proto.Field( @@ -4595,6 +5397,10 @@ class AdvanceRolloutRequest(proto.Message): proto.STRING, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class AdvanceRolloutResponse(proto.Message): @@ -4608,12 +5414,19 @@ class CancelRolloutRequest(proto.Message): name (str): Required. Name of the Rollout. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class CancelRolloutResponse(proto.Message): @@ -4632,6 +5445,9 @@ class IgnoreJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to ignore. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ rollout: str = proto.Field( @@ -4646,6 +5462,10 @@ class IgnoreJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class IgnoreJobResponse(proto.Message): @@ -4664,6 +5484,9 @@ class RetryJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to retry. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ rollout: str = proto.Field( @@ -4678,6 +5501,10 @@ class RetryJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class RetryJobResponse(proto.Message): @@ -5310,12 +6137,19 @@ class TerminateJobRunRequest(proto.Message): name (str): Required. Name of the ``JobRun``. Format must be ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{jobRun}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class TerminateJobRunResponse(proto.Message): @@ -6095,6 +6929,9 @@ class AutomationRun(proto.Message): Output only. Explains the current state of the ``AutomationRun``. Present only when an explanation is needed. + policy_violation (google.cloud.deploy_v1.types.PolicyViolation): + Output only. Contains information about what policies + prevented the ``AutomationRun`` from proceeding. expire_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time the ``AutomationRun`` expires. 
An ``AutomationRun`` expires after 14 days from its creation @@ -6192,6 +7029,11 @@ class State(proto.Enum): proto.STRING, number=9, ) + policy_violation: "PolicyViolation" = proto.Field( + proto.MESSAGE, + number=10, + message="PolicyViolation", + ) expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py index 28a732b7b078..853e64e15f60 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py @@ -46,6 +46,8 @@ class Type(proto.Enum): Resource deleted. TYPE_ROLLOUT_UPDATE (7): Rollout updated. + TYPE_DEPLOY_POLICY_EVALUATION (8): + Deploy Policy evaluation. TYPE_RENDER_STATUES_CHANGE (2): Deprecated: This field is never used. Use release_render log type instead. @@ -57,6 +59,7 @@ class Type(proto.Enum): TYPE_RESTRICTION_VIOLATED = 5 TYPE_RESOURCE_DELETED = 6 TYPE_ROLLOUT_UPDATE = 7 + TYPE_DEPLOY_POLICY_EVALUATION = 8 TYPE_RENDER_STATUES_CHANGE = 2 diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py new file mode 100644 index 000000000000..5c0a67fbf626 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py new file mode 100644 index 000000000000..87445443040f --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py new file mode 100644 index 000000000000..6f6545a0350c --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py new file mode 100644 index 000000000000..1c9d7dd414fe --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py new file mode 100644 index 000000000000..a96e7a4309c2 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py new file mode 100644 index 000000000000..669a50729182 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py new file mode 100644 index 000000000000..f3932c8119a1 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py new file mode 100644 index 000000000000..8c6baf6b8c95 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py new file mode 100644 index 000000000000..d08ab6a4ded1 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py new file mode 100644 index 000000000000..18ab7cd6a8b9 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index b4f5eeee1a80..dfbc37400a05 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -1355,6 +1355,183 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_create_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.create_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.CreateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.create_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.CreateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2377,19 +2554,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": "google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2409,13 +2586,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", + "description": "Sample for DeleteDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async", "segments": [ { "end": 55, @@ -2448,7 +2625,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py" }, { "canonical": true, @@ -2457,19 +2634,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", + "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": "google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2489,13 +2666,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", + "description": "Sample for DeleteDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync", "segments": [ { "end": 55, @@ -2528,7 +2705,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py" }, { "canonical": true, @@ -2538,19 +2715,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2569,22 +2746,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2599,17 +2776,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" }, { "canonical": true, @@ -2618,19 +2795,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" 
}, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2649,22 +2826,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2679,17 +2856,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" }, { "canonical": true, @@ -2699,19 +2876,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" }, { "name": "name", @@ -2730,14 +2907,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.Automation", - "shortName": "get_automation" + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" }, - "description": "Sample for GetAutomation", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", "segments": [ { "end": 51, @@ -2770,7 +2947,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + "title": 
"clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" }, { "canonical": true, @@ -2779,14 +2956,175 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" + }, + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.Automation", + "shortName": "get_automation" + }, + "description": "Sample for GetAutomation", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" }, "parameters": [ { @@ -3335,6 +3673,167 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_get_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4278,11 +4777,172 @@ "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationRunsPager", "shortName": "list_automation_runs" }, - "description": "Sample for ListAutomationRuns", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "description": "Sample for ListAutomationRuns", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": 
"ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", "segments": [ { "end": 52, @@ -4315,7 +4975,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" }, { "canonical": true, @@ -4325,19 +4985,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": "ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4356,14 +5016,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", "segments": [ { "end": 52, @@ -4396,7 +5056,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" }, { "canonical": true, @@ -4405,19 +5065,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": 
"ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4436,14 +5096,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", "segments": [ { "end": 52, @@ -4476,7 +5136,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" }, { "canonical": true, @@ -4486,19 +5146,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4517,14 +5177,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", "segments": [ { "end": 52, @@ -4557,7 +5217,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" }, { "canonical": true, @@ -4566,19 +5226,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", "method": { - "fullName": 
"google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4597,14 +5257,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", "segments": [ { "end": 52, @@ -4637,7 +5297,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" }, { "canonical": true, @@ -4647,19 +5307,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4678,14 +5338,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async", "segments": [ { "end": 52, @@ -4718,7 +5378,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py" }, { "canonical": true, @@ -4727,19 
+5387,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4758,14 +5418,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync", "segments": [ { "end": 52, @@ -4798,7 +5458,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py" }, { "canonical": true, @@ -6466,6 +7126,175 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_update_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py index 1a652b4ea5be..10255de70644 100644 --- a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py +++ b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py @@ -40,44 +40,49 @@ class deployCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'abandon_release': ('name', ), - 'advance_rollout': ('name', 'phase_id', ), - 'approve_rollout': ('name', 'approved', ), + 'advance_rollout': ('name', 'phase_id', 'override_deploy_policy', ), + 'approve_rollout': ('name', 'approved', 'override_deploy_policy', ), 'cancel_automation_run': ('name', ), - 'cancel_rollout': ('name', ), + 'cancel_rollout': ('name', 'override_deploy_policy', ), 'create_automation': ('parent', 'automation_id', 'automation', 'request_id', 'validate_only', ), 'create_custom_target_type': ('parent', 'custom_target_type_id', 'custom_target_type', 'request_id', 'validate_only', ), 'create_delivery_pipeline': ('parent', 'delivery_pipeline_id', 'delivery_pipeline', 'request_id', 'validate_only', ), - 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', ), - 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'starting_phase_id', ), + 
'create_deploy_policy': ('parent', 'deploy_policy_id', 'deploy_policy', 'request_id', 'validate_only', ), + 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', 'override_deploy_policy', ), + 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'override_deploy_policy', 'starting_phase_id', ), 'create_target': ('parent', 'target_id', 'target', 'request_id', 'validate_only', ), 'delete_automation': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_custom_target_type': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_delivery_pipeline': ('name', 'request_id', 'allow_missing', 'validate_only', 'force', 'etag', ), + 'delete_deploy_policy': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_target': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'get_automation': ('name', ), 'get_automation_run': ('name', ), 'get_config': ('name', ), 'get_custom_target_type': ('name', ), 'get_delivery_pipeline': ('name', ), + 'get_deploy_policy': ('name', ), 'get_job_run': ('name', ), 'get_release': ('name', ), 'get_rollout': ('name', ), 'get_target': ('name', ), - 'ignore_job': ('rollout', 'phase_id', 'job_id', ), + 'ignore_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), 'list_automation_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_automations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_custom_target_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_delivery_pipelines': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_deploy_policies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_job_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_releases': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_rollouts': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_targets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'retry_job': ('rollout', 'phase_id', 'job_id', ), - 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', ), - 'terminate_job_run': ('name', ), + 'retry_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), + 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', 'override_deploy_policy', ), + 'terminate_job_run': ('name', 'override_deploy_policy', ), 'update_automation': ('update_mask', 'automation', 'request_id', 'allow_missing', 'validate_only', ), 'update_custom_target_type': ('update_mask', 'custom_target_type', 'request_id', 'allow_missing', 'validate_only', ), 'update_delivery_pipeline': ('update_mask', 'delivery_pipeline', 'request_id', 'allow_missing', 'validate_only', ), + 'update_deploy_policy': ('update_mask', 'deploy_policy', 'request_id', 'allow_missing', 'validate_only', ), 'update_target': ('update_mask', 'target', 'request_id', 'allow_missing', 'validate_only', ), } diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index ff22c1a2c000..b6c62348fc82 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ 
-53,6 +53,9 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -9748,11 +9751,11 @@ async def test_abandon_release_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ApproveRolloutRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_approve_rollout(request_type, transport: str = "grpc"): +def test_create_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9763,22 +9766,24 @@ def test_approve_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() - response = client.approve_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) -def test_approve_rollout_empty_call(): +def test_create_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -9787,17 +9792,19 @@ def test_approve_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_rollout() + client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() -def test_approve_rollout_non_empty_request_with_auto_populated_field(): +def test_create_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -9808,24 +9815,30 @@ def test_approve_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.ApproveRolloutRequest( - name="name_value", + request = cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_rollout(request=request) + client.create_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) -def test_approve_rollout_use_cached_wrapped_rpc(): +def test_create_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9839,21 +9852,30 @@ def test_approve_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.approve_rollout in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.approve_rollout(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.approve_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9861,7 +9883,7 @@ def test_approve_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_approve_rollout_empty_call_async(): +async def test_create_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -9870,19 +9892,21 @@ async def test_approve_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout() + response = await client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() @pytest.mark.asyncio -async def test_approve_rollout_async_use_cached_wrapped_rpc( +async def test_create_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9899,7 +9923,7 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.approve_rollout + client._client._transport.create_deploy_policy in client._client._transport._wrapped_methods ) @@ -9907,16 +9931,21 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.approve_rollout + client._client._transport.create_deploy_policy ] = mock_rpc request = {} - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.approve_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9924,8 +9953,8 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_approve_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest +async def test_create_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9937,43 +9966,47 @@ async def test_approve_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout(request) + response = await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_approve_rollout_async_from_dict(): - await test_approve_rollout_async(request_type=dict) +async def test_create_deploy_policy_async_from_dict(): + await test_create_deploy_policy_async(request_type=dict) -def test_approve_rollout_field_headers(): +def test_create_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: - call.return_value = cloud_deploy.ApproveRolloutResponse() - client.approve_rollout(request) + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9984,28 +10017,30 @@ def test_approve_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_approve_rollout_field_headers_async(): +async def test_create_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10016,35 +10051,45 @@ async def test_approve_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_approve_rollout_flattened(): +def test_create_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.approve_rollout( - name="name_value", + client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val -def test_approve_rollout_flattened_error(): +def test_create_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10052,43 +10097,55 @@ def test_approve_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.asyncio -async def test_approve_rollout_flattened_async(): +async def test_create_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.approve_rollout( - name="name_value", + response = await client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_approve_rollout_flattened_error_async(): +async def test_create_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10096,20 +10153,22 @@ async def test_approve_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + await client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AdvanceRolloutRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_advance_rollout(request_type, transport: str = "grpc"): +def test_update_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10120,22 +10179,24 @@ def test_advance_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() - response = client.advance_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) -def test_advance_rollout_empty_call(): +def test_update_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10144,17 +10205,19 @@ def test_advance_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.advance_rollout() + client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() -def test_advance_rollout_non_empty_request_with_auto_populated_field(): +def test_update_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10165,26 +10228,26 @@ def test_advance_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + request = cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.advance_rollout(request=request) + client.update_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) -def test_advance_rollout_use_cached_wrapped_rpc(): +def test_update_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10198,21 +10261,30 @@ def test_advance_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.advance_rollout in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.advance_rollout(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.advance_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10220,7 +10292,7 @@ def test_advance_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_advance_rollout_empty_call_async(): +async def test_update_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10229,19 +10301,21 @@ async def test_advance_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout() + response = await client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() @pytest.mark.asyncio -async def test_advance_rollout_async_use_cached_wrapped_rpc( +async def test_update_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10258,7 +10332,7 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.advance_rollout + client._client._transport.update_deploy_policy in client._client._transport._wrapped_methods ) @@ -10266,16 +10340,21 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.advance_rollout + client._client._transport.update_deploy_policy ] = mock_rpc request = {} - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.advance_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10283,8 +10362,8 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_advance_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest +async def test_update_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10296,43 +10375,47 @@ async def test_advance_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout(request) + response = await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_advance_rollout_async_from_dict(): - await test_advance_rollout_async(request_type=dict) +async def test_update_deploy_policy_async_from_dict(): + await test_update_deploy_policy_async(request_type=dict) -def test_advance_rollout_field_headers(): +def test_update_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: - call.return_value = cloud_deploy.AdvanceRolloutResponse() - client.advance_rollout(request) + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10343,28 +10426,30 @@ def test_advance_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_advance_rollout_field_headers_async(): +async def test_update_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10375,39 +10460,41 @@ async def test_advance_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] -def test_advance_rollout_flattened(): +def test_update_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_advance_rollout_flattened_error(): +def test_update_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10415,48 +10502,50 @@ def test_advance_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_advance_rollout_flattened_async(): +async def test_update_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + response = await client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_advance_rollout_flattened_error_async(): +async def test_update_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10464,21 +10553,21 @@ async def test_advance_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + await client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CancelRolloutRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_cancel_rollout(request_type, transport: str = "grpc"): +def test_delete_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10489,22 +10578,24 @@ def test_cancel_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() - response = client.cancel_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) -def test_cancel_rollout_empty_call(): +def test_delete_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10513,17 +10604,19 @@ def test_cancel_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.cancel_rollout() + client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() -def test_cancel_rollout_non_empty_request_with_auto_populated_field(): +def test_delete_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10534,24 +10627,30 @@ def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelRolloutRequest( + request = cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_rollout(request=request) + client.delete_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest( + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_rollout_use_cached_wrapped_rpc(): +def test_delete_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10565,21 +10664,30 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.cancel_rollout in client._transport._wrapped_methods + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc request = {} - client.cancel_rollout(request) + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10587,7 +10695,7 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_rollout_empty_call_async(): +async def test_delete_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -10596,19 +10704,21 @@ async def test_cancel_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout() + response = await client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() @pytest.mark.asyncio -async def test_cancel_rollout_async_use_cached_wrapped_rpc( +async def test_delete_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10625,7 +10735,7 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_rollout + client._client._transport.delete_deploy_policy in client._client._transport._wrapped_methods ) @@ -10633,16 +10743,21 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_rollout + client._client._transport.delete_deploy_policy ] = mock_rpc request = {} - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.cancel_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10650,8 +10765,8 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_cancel_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest +async def test_delete_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10663,43 +10778,47 @@ async def test_cancel_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout(request) + response = await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_rollout_async_from_dict(): - await test_cancel_rollout_async(request_type=dict) +async def test_delete_deploy_policy_async_from_dict(): + await test_delete_deploy_policy_async(request_type=dict) -def test_cancel_rollout_field_headers(): +def test_delete_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: - call.return_value = cloud_deploy.CancelRolloutResponse() - client.cancel_rollout(request) + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10715,23 +10834,25 @@ def test_cancel_rollout_field_headers(): @pytest.mark.asyncio -async def test_cancel_rollout_field_headers_async(): +async def test_delete_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10746,18 +10867,20 @@ async def test_cancel_rollout_field_headers_async(): ) in kw["metadata"] -def test_cancel_rollout_flattened(): +def test_delete_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.cancel_rollout( + client.delete_deploy_policy( name="name_value", ) @@ -10770,7 +10893,7 @@ def test_cancel_rollout_flattened(): assert arg == mock_val -def test_cancel_rollout_flattened_error(): +def test_delete_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10778,29 +10901,31 @@ def test_cancel_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_rollout_flattened_async(): +async def test_delete_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_rollout( + response = await client.delete_deploy_policy( name="name_value", ) @@ -10814,7 +10939,7 @@ async def test_cancel_rollout_flattened_async(): @pytest.mark.asyncio -async def test_cancel_rollout_flattened_error_async(): +async def test_delete_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10822,8 +10947,8 @@ async def test_cancel_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + await client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @@ -10831,11 +10956,11 @@ async def test_cancel_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListRolloutsRequest, + cloud_deploy.ListDeployPoliciesRequest, dict, ], ) -def test_list_rollouts(request_type, transport: str = "grpc"): +def test_list_deploy_policies(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10846,27 +10971,29 @@ def test_list_rollouts(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListRolloutsResponse( + call.return_value = cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_rollouts(request) + response = client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRolloutsPager) + assert isinstance(response, pagers.ListDeployPoliciesPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_rollouts_empty_call(): +def test_list_deploy_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10875,17 +11002,19 @@ def test_list_rollouts_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_rollouts() + client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() -def test_list_rollouts_non_empty_request_with_auto_populated_field(): +def test_list_deploy_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10896,7 +11025,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListRolloutsRequest( + request = cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10904,14 +11033,16 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_rollouts(request=request) + client.list_deploy_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest( + assert args[0] == cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10919,7 +11050,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) -def test_list_rollouts_use_cached_wrapped_rpc(): +def test_list_deploy_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10933,21 +11064,25 @@ def test_list_rollouts_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_rollouts in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc request = {} - client.list_rollouts(request) + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_rollouts(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10955,7 +11090,7 @@ def test_list_rollouts_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_rollouts_empty_call_async(): +async def test_list_deploy_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10964,22 +11099,24 @@ async def test_list_rollouts_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts() + response = await client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() @pytest.mark.asyncio -async def test_list_rollouts_async_use_cached_wrapped_rpc( +async def test_list_deploy_policies_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10996,7 +11133,7 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_rollouts + client._client._transport.list_deploy_policies in client._client._transport._wrapped_methods ) @@ -11004,16 +11141,16 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_rollouts + client._client._transport.list_deploy_policies ] = mock_rpc request = {} - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11021,8 +11158,8 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_rollouts_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest +async def test_list_deploy_policies_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11034,48 +11171,52 @@ async def test_list_rollouts_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts(request) + response = await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRolloutsAsyncPager) + assert isinstance(response, pagers.ListDeployPoliciesAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_rollouts_async_from_dict(): - await test_list_rollouts_async(request_type=dict) +async def test_list_deploy_policies_async_from_dict(): + await test_list_deploy_policies_async(request_type=dict) -def test_list_rollouts_field_headers(): +def test_list_deploy_policies_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: - call.return_value = cloud_deploy.ListRolloutsResponse() - client.list_rollouts(request) + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: + call.return_value = cloud_deploy.ListDeployPoliciesResponse() + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11091,23 +11232,25 @@ def test_list_rollouts_field_headers(): @pytest.mark.asyncio -async def test_list_rollouts_field_headers_async(): +async def test_list_deploy_policies_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11122,18 +11265,20 @@ async def test_list_rollouts_field_headers_async(): ) in kw["metadata"] -def test_list_rollouts_flattened(): +def test_list_deploy_policies_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_rollouts( + client.list_deploy_policies( parent="parent_value", ) @@ -11146,7 +11291,7 @@ def test_list_rollouts_flattened(): assert arg == mock_val -def test_list_rollouts_flattened_error(): +def test_list_deploy_policies_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11154,29 +11299,31 @@ def test_list_rollouts_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_rollouts_flattened_async(): +async def test_list_deploy_policies_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_rollouts( + response = await client.list_deploy_policies( parent="parent_value", ) @@ -11190,7 +11337,7 @@ async def test_list_rollouts_flattened_async(): @pytest.mark.asyncio -async def test_list_rollouts_flattened_error_async(): +async def test_list_deploy_policies_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11198,44 +11345,46 @@ async def test_list_rollouts_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + await client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) -def test_list_rollouts_pager(transport_name: str = "grpc"): +def test_list_deploy_policies_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11247,7 +11396,7 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_rollouts(request={}, retry=retry, timeout=timeout) + pager = client.list_deploy_policies(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -11255,89 +11404,93 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in results) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) -def test_list_rollouts_pages(transport_name: str = "grpc"): +def test_list_deploy_policies_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - pages = list(client.list_rollouts(request={}).pages) + pages = list(client.list_deploy_policies(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_rollouts_async_pager(): +async def test_list_deploy_policies_async_pager(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - async_pager = await client.list_rollouts( + async_pager = await client.list_deploy_policies( request={}, ) assert async_pager.next_page_token == "abc" @@ -11346,43 +11499,45 @@ async def test_list_rollouts_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in responses) @pytest.mark.asyncio -async def test_list_rollouts_async_pages(): +async def test_list_deploy_policies_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11391,7 +11546,7 @@ async def test_list_rollouts_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_rollouts(request={}) + await client.list_deploy_policies(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -11401,11 +11556,11 @@ async def test_list_rollouts_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetRolloutRequest, + cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_get_rollout(request_type, transport: str = "grpc"): +def test_get_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11416,52 +11571,35 @@ def test_get_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout( + call.return_value = cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_rollout(request) + response = client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_rollout_empty_call(): +def test_get_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11470,17 +11608,19 @@ def test_get_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_rollout() + client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() -def test_get_rollout_non_empty_request_with_auto_populated_field(): +def test_get_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11491,24 +11631,26 @@ def test_get_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetRolloutRequest( + request = cloud_deploy.GetDeployPolicyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_rollout(request=request) + client.get_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest( + assert args[0] == cloud_deploy.GetDeployPolicyRequest( name="name_value", ) -def test_get_rollout_use_cached_wrapped_rpc(): +def test_get_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11522,21 +11664,23 @@ def test_get_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_rollout in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11544,7 +11688,7 @@ def test_get_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_rollout_empty_call_async(): +async def test_get_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -11553,33 +11697,27 @@ async def test_get_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout() + response = await client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() @pytest.mark.asyncio -async def test_get_rollout_async_use_cached_wrapped_rpc( +async def test_get_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11596,7 +11734,7 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_rollout + client._client._transport.get_deploy_policy in client._client._transport._wrapped_methods ) @@ -11604,16 +11742,16 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_rollout + client._client._transport.get_deploy_policy ] = mock_rpc request = {} - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11621,8 +11759,8 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest +async def test_get_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11634,73 +11772,58 @@ async def test_get_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout(request) + response = await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_rollout_async_from_dict(): - await test_get_rollout_async(request_type=dict) +async def test_get_deploy_policy_async_from_dict(): + await test_get_deploy_policy_async(request_type=dict) -def test_get_rollout_field_headers(): +def test_get_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: - call.return_value = cloud_deploy.Rollout() - client.get_rollout(request) + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: + call.return_value = cloud_deploy.DeployPolicy() + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11716,23 +11839,25 @@ def test_get_rollout_field_headers(): @pytest.mark.asyncio -async def test_get_rollout_field_headers_async(): +async def test_get_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11747,18 +11872,20 @@ async def test_get_rollout_field_headers_async(): ) in kw["metadata"] -def test_get_rollout_flattened(): +def test_get_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_rollout( + client.get_deploy_policy( name="name_value", ) @@ -11771,7 +11898,7 @@ def test_get_rollout_flattened(): assert arg == mock_val -def test_get_rollout_flattened_error(): +def test_get_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11779,29 +11906,31 @@ def test_get_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_rollout( - cloud_deploy.GetRolloutRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_rollout_flattened_async(): +async def test_get_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_rollout( + response = await client.get_deploy_policy( name="name_value", ) @@ -11815,7 +11944,7 @@ async def test_get_rollout_flattened_async(): @pytest.mark.asyncio -async def test_get_rollout_flattened_error_async(): +async def test_get_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11823,8 +11952,8 @@ async def test_get_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_rollout( - cloud_deploy.GetRolloutRequest(), + await client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @@ -11832,11 +11961,11 @@ async def test_get_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateRolloutRequest, + cloud_deploy.ApproveRolloutRequest, dict, ], ) -def test_create_rollout(request_type, transport: str = "grpc"): +def test_approve_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11847,22 +11976,22 @@ def test_create_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_rollout(request) + call.return_value = cloud_deploy.ApproveRolloutResponse() + response = client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) -def test_create_rollout_empty_call(): +def test_approve_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11871,17 +12000,17 @@ def test_create_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout() + client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() -def test_create_rollout_non_empty_request_with_auto_populated_field(): +def test_approve_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11892,30 +12021,24 @@ def test_create_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + request = cloud_deploy.ApproveRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout(request=request) + client.approve_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + assert args[0] == cloud_deploy.ApproveRolloutRequest( + name="name_value", ) -def test_create_rollout_use_cached_wrapped_rpc(): +def test_approve_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11929,26 +12052,21 @@ def test_create_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_rollout in client._transport._wrapped_methods + assert client._transport.approve_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc + client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc request = {} - client.create_rollout(request) + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_rollout(request) + client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11956,7 +12074,7 @@ def test_create_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_rollout_empty_call_async(): +async def test_approve_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -11965,19 +12083,19 @@ async def test_create_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout() + response = await client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() @pytest.mark.asyncio -async def test_create_rollout_async_use_cached_wrapped_rpc( +async def test_approve_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11994,7 +12112,7 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_rollout + client._client._transport.approve_rollout in client._client._transport._wrapped_methods ) @@ -12002,21 +12120,16 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_rollout + client._client._transport.approve_rollout ] = mock_rpc request = {} - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12024,8 +12137,8 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest +async def test_approve_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12037,43 +12150,43 @@ async def test_create_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout(request) + response = await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) @pytest.mark.asyncio -async def test_create_rollout_async_from_dict(): - await test_create_rollout_async(request_type=dict) +async def test_approve_rollout_async_from_dict(): + await test_approve_rollout_async(request_type=dict) -def test_create_rollout_field_headers(): +def test_approve_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_rollout(request) + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + call.return_value = cloud_deploy.ApproveRolloutResponse() + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12084,28 +12197,28 @@ def test_create_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_rollout_field_headers_async(): +async def test_approve_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ApproveRolloutResponse() ) - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12116,43 +12229,35 @@ async def test_create_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_rollout_flattened(): +def test_approve_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_rollout_flattened_error(): +def test_approve_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12160,53 +12265,43 @@ def test_create_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_rollout_flattened_async(): +async def test_approve_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + response = await client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_rollout_flattened_error_async(): +async def test_approve_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12214,22 +12309,20 @@ async def test_create_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + await client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.IgnoreJobRequest, + cloud_deploy.AdvanceRolloutRequest, dict, ], ) -def test_ignore_job(request_type, transport: str = "grpc"): +def test_advance_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12240,22 +12333,22 @@ def test_ignore_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() - response = client.ignore_job(request) + call.return_value = cloud_deploy.AdvanceRolloutResponse() + response = client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) -def test_ignore_job_empty_call(): +def test_advance_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12264,17 +12357,17 @@ def test_ignore_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job() + client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() -def test_ignore_job_non_empty_request_with_auto_populated_field(): +def test_advance_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -12285,28 +12378,26 @@ def test_ignore_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + request = cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job(request=request) + client.advance_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + assert args[0] == cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) -def test_ignore_job_use_cached_wrapped_rpc(): +def test_advance_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12320,21 +12411,21 @@ def test_ignore_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.ignore_job in client._transport._wrapped_methods + assert client._transport.advance_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc + client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc request = {} - client.ignore_job(request) + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.ignore_job(request) + client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12342,7 +12433,7 @@ def test_ignore_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_ignore_job_empty_call_async(): +async def test_advance_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12351,19 +12442,21 @@ async def test_ignore_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job() + response = await client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() @pytest.mark.asyncio -async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_advance_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12378,7 +12471,7 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.ignore_job + client._client._transport.advance_rollout in client._client._transport._wrapped_methods ) @@ -12386,16 +12479,16 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.ignore_job + client._client._transport.advance_rollout ] = mock_rpc request = {} - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12403,8 +12496,8 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_ignore_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest +async def test_advance_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12416,43 +12509,43 @@ async def test_ignore_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job(request) + response = await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) @pytest.mark.asyncio -async def test_ignore_job_async_from_dict(): - await test_ignore_job_async(request_type=dict) +async def test_advance_rollout_async_from_dict(): + await test_advance_rollout_async(request_type=dict) -def test_ignore_job_field_headers(): +def test_advance_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: - call.return_value = cloud_deploy.IgnoreJobResponse() - client.ignore_job(request) + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + call.return_value = cloud_deploy.AdvanceRolloutResponse() + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12463,28 +12556,28 @@ def test_ignore_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_ignore_job_field_headers_async(): +async def test_advance_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12495,43 +12588,39 @@ async def test_ignore_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_ignore_job_flattened(): +def test_advance_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.ignore_job( - rollout="rollout_value", + client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val -def test_ignore_job_flattened_error(): +def test_advance_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12539,53 +12628,48 @@ def test_ignore_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.asyncio -async def test_ignore_job_flattened_async(): +async def test_advance_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.ignore_job( - rollout="rollout_value", + response = await client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_ignore_job_flattened_error_async(): +async def test_advance_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12593,22 +12677,21 @@ async def test_ignore_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + await client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RetryJobRequest, + cloud_deploy.CancelRolloutRequest, dict, ], ) -def test_retry_job(request_type, transport: str = "grpc"): +def test_cancel_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12619,22 +12702,22 @@ def test_retry_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() - response = client.retry_job(request) + call.return_value = cloud_deploy.CancelRolloutResponse() + response = client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) -def test_retry_job_empty_call(): +def test_cancel_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12643,17 +12726,17 @@ def test_retry_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job() + client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() -def test_retry_job_non_empty_request_with_auto_populated_field(): +def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -12664,28 +12747,24 @@ def test_retry_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + request = cloud_deploy.CancelRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job(request=request) + client.cancel_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + assert args[0] == cloud_deploy.CancelRolloutRequest( + name="name_value", ) -def test_retry_job_use_cached_wrapped_rpc(): +def test_cancel_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12699,21 +12778,21 @@ def test_retry_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.retry_job in client._transport._wrapped_methods + assert client._transport.cancel_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc request = {} - client.retry_job(request) + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.retry_job(request) + client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12721,7 +12800,7 @@ def test_retry_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_retry_job_empty_call_async(): +async def test_cancel_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12730,19 +12809,21 @@ async def test_retry_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job() + response = await client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() @pytest.mark.asyncio -async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_cancel_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12757,7 +12838,7 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.retry_job + client._client._transport.cancel_rollout in client._client._transport._wrapped_methods ) @@ -12765,16 +12846,16 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.retry_job + client._client._transport.cancel_rollout ] = mock_rpc request = {} - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12782,8 +12863,8 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_retry_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest +async def test_cancel_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12795,43 +12876,43 @@ async def test_retry_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job(request) + response = await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) @pytest.mark.asyncio -async def test_retry_job_async_from_dict(): - await test_retry_job_async(request_type=dict) +async def test_cancel_rollout_async_from_dict(): + await test_cancel_rollout_async(request_type=dict) -def test_retry_job_field_headers(): +def test_cancel_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: - call.return_value = cloud_deploy.RetryJobResponse() - client.retry_job(request) + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + call.return_value = cloud_deploy.CancelRolloutResponse() + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12842,28 +12923,28 @@ def test_retry_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_retry_job_field_headers_async(): +async def test_cancel_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12874,43 +12955,35 @@ async def test_retry_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_retry_job_flattened(): +def test_cancel_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_retry_job_flattened_error(): +def test_cancel_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12918,53 +12991,43 @@ def test_retry_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_retry_job_flattened_async(): +async def test_cancel_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + response = await client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_retry_job_flattened_error_async(): +async def test_cancel_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12972,22 +13035,20 @@ async def test_retry_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + await client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListJobRunsRequest, + cloud_deploy.ListRolloutsRequest, dict, ], ) -def test_list_job_runs(request_type, transport: str = "grpc"): +def test_list_rollouts(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12998,27 +13059,27 @@ def test_list_job_runs(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse( + call.return_value = cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_job_runs(request) + response = client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobRunsPager) + assert isinstance(response, pagers.ListRolloutsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_job_runs_empty_call(): +def test_list_rollouts_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13027,17 +13088,17 @@ def test_list_job_runs_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_job_runs() + client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() -def test_list_job_runs_non_empty_request_with_auto_populated_field(): +def test_list_rollouts_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13048,7 +13109,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.ListJobRunsRequest( + request = cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -13056,14 +13117,14 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_job_runs(request=request) + client.list_rollouts(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest( + assert args[0] == cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -13071,7 +13132,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) -def test_list_job_runs_use_cached_wrapped_rpc(): +def test_list_rollouts_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13085,21 +13146,21 @@ def test_list_job_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_job_runs in client._transport._wrapped_methods + assert client._transport.list_rollouts in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc request = {} - client.list_job_runs(request) + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_job_runs(request) + client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13107,7 +13168,7 @@ def test_list_job_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_job_runs_empty_call_async(): +async def test_list_rollouts_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -13116,22 +13177,22 @@ async def test_list_job_runs_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs() + response = await client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() @pytest.mark.asyncio -async def test_list_job_runs_async_use_cached_wrapped_rpc( +async def test_list_rollouts_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13148,7 +13209,7 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_job_runs + client._client._transport.list_rollouts in client._client._transport._wrapped_methods ) @@ -13156,16 +13217,16 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_job_runs + client._client._transport.list_rollouts ] = mock_rpc request = {} - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13173,8 +13234,8 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_job_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest +async def test_list_rollouts_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13186,48 +13247,48 @@ async def test_list_job_runs_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs(request) + response = await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert isinstance(response, pagers.ListRolloutsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_job_runs_async_from_dict(): - await test_list_job_runs_async(request_type=dict) +async def test_list_rollouts_async_from_dict(): + await test_list_rollouts_async(request_type=dict) -def test_list_job_runs_field_headers(): +def test_list_rollouts_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: - call.return_value = cloud_deploy.ListJobRunsResponse() - client.list_job_runs(request) + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + call.return_value = cloud_deploy.ListRolloutsResponse() + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13243,23 +13304,23 @@ def test_list_job_runs_field_headers(): @pytest.mark.asyncio -async def test_list_job_runs_field_headers_async(): +async def test_list_rollouts_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13274,18 +13335,18 @@ async def test_list_job_runs_field_headers_async(): ) in kw["metadata"] -def test_list_job_runs_flattened(): +def test_list_rollouts_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_job_runs( + client.list_rollouts( parent="parent_value", ) @@ -13298,7 +13359,7 @@ def test_list_job_runs_flattened(): assert arg == mock_val -def test_list_job_runs_flattened_error(): +def test_list_rollouts_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13306,29 +13367,29 @@ def test_list_job_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_job_runs_flattened_async(): +async def test_list_rollouts_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_job_runs( + response = await client.list_rollouts( parent="parent_value", ) @@ -13342,7 +13403,7 @@ async def test_list_job_runs_flattened_async(): @pytest.mark.asyncio -async def test_list_job_runs_flattened_error_async(): +async def test_list_rollouts_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13350,44 +13411,44 @@ async def test_list_job_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + await client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) -def test_list_job_runs_pager(transport_name: str = "grpc"): +def test_list_rollouts_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13399,7 +13460,7 @@ def test_list_job_runs_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_job_runs(request={}, retry=retry, timeout=timeout) + pager = client.list_rollouts(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -13407,89 +13468,89 @@ def test_list_job_runs_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + assert all(isinstance(i, cloud_deploy.Rollout) for i in results) -def test_list_job_runs_pages(transport_name: str = "grpc"): +def test_list_rollouts_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - pages = list(client.list_job_runs(request={}).pages) + pages = list(client.list_rollouts(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_job_runs_async_pager(): +async def test_list_rollouts_async_pager(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - async_pager = await client.list_job_runs( + async_pager = await client.list_rollouts( request={}, ) assert async_pager.next_page_token == "abc" @@ -13498,43 +13559,43 @@ async def test_list_job_runs_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) @pytest.mark.asyncio -async def test_list_job_runs_async_pages(): +async def test_list_rollouts_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13543,7 +13604,7 @@ async def test_list_job_runs_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_job_runs(request={}) + await client.list_rollouts(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -13553,11 +13614,11 @@ async def test_list_job_runs_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetJobRunRequest, + cloud_deploy.GetRolloutRequest, dict, ], ) -def test_get_job_run(request_type, transport: str = "grpc"): +def test_get_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13568,35 +13629,52 @@ def test_get_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun( + call.return_value = cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_job_run(request) + response = client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_job_run_empty_call(): +def test_get_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13605,17 +13683,17 @@ def test_get_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_job_run() + client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() -def test_get_job_run_non_empty_request_with_auto_populated_field(): +def test_get_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13626,24 +13704,24 @@ def test_get_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetJobRunRequest( + request = cloud_deploy.GetRolloutRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_job_run(request=request) + client.get_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest( + assert args[0] == cloud_deploy.GetRolloutRequest( name="name_value", ) -def test_get_job_run_use_cached_wrapped_rpc(): +def test_get_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13657,21 +13735,21 @@ def test_get_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_job_run in client._transport._wrapped_methods + assert client._transport.get_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc request = {} - client.get_job_run(request) + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_job_run(request) + client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13679,7 +13757,7 @@ def test_get_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_job_run_empty_call_async(): +async def test_get_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -13688,26 +13766,33 @@ async def test_get_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run() + response = await client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() @pytest.mark.asyncio -async def test_get_job_run_async_use_cached_wrapped_rpc( +async def test_get_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13724,7 +13809,7 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_job_run + client._client._transport.get_rollout in client._client._transport._wrapped_methods ) @@ -13732,16 +13817,16 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_job_run + client._client._transport.get_rollout ] = mock_rpc request = {} - await client.get_job_run(request) + await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_job_run(request) + await client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13749,8 +13834,8 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest +async def test_get_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13762,56 +13847,73 @@ async def test_get_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run(request) + response = await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_job_run_async_from_dict(): - await test_get_job_run_async(request_type=dict) +async def test_get_rollout_async_from_dict(): + await test_get_rollout_async(request_type=dict) -def test_get_job_run_field_headers(): +def test_get_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = cloud_deploy.JobRun() - client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = cloud_deploy.Rollout() + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13827,21 +13929,23 @@ def test_get_job_run_field_headers(): @pytest.mark.asyncio -async def test_get_job_run_field_headers_async(): +async def test_get_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) - await client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) + await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13856,18 +13960,18 @@ async def test_get_job_run_field_headers_async(): ) in kw["metadata"] -def test_get_job_run_flattened(): +def test_get_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_job_run( + client.get_rollout( name="name_value", ) @@ -13880,7 +13984,7 @@ def test_get_job_run_flattened(): assert arg == mock_val -def test_get_job_run_flattened_error(): +def test_get_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13888,27 +13992,29 @@ def test_get_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_job_run( - cloud_deploy.GetJobRunRequest(), + client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_job_run_flattened_async(): +async def test_get_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_job_run( + response = await client.get_rollout( name="name_value", ) @@ -13922,7 +14028,7 @@ async def test_get_job_run_flattened_async(): @pytest.mark.asyncio -async def test_get_job_run_flattened_error_async(): +async def test_get_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13930,8 +14036,8 @@ async def test_get_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_job_run( - cloud_deploy.GetJobRunRequest(), + await client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @@ -13939,11 +14045,11 @@ async def test_get_job_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.TerminateJobRunRequest, + cloud_deploy.CreateRolloutRequest, dict, ], ) -def test_terminate_job_run(request_type, transport: str = "grpc"): +def test_create_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13954,24 +14060,22 @@ def test_terminate_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() - response = client.terminate_job_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) -def test_terminate_job_run_empty_call(): +def test_create_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13980,19 +14084,17 @@ def test_terminate_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run() + client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() -def test_terminate_job_run_non_empty_request_with_auto_populated_field(): +def test_create_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -14003,26 +14105,30 @@ def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.TerminateJobRunRequest( - name="name_value", + request = cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run(request=request) + client.create_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) -def test_terminate_job_run_use_cached_wrapped_rpc(): +def test_create_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14036,23 +14142,26 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.terminate_job_run in client._transport._wrapped_methods + assert client._transport.create_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.terminate_job_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc request = {} - client.terminate_job_run(request) + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.terminate_job_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14060,7 +14169,7 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_terminate_job_run_empty_call_async(): +async def test_create_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14069,21 +14178,19 @@ async def test_terminate_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run() + response = await client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() @pytest.mark.asyncio -async def test_terminate_job_run_async_use_cached_wrapped_rpc( +async def test_create_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14100,7 +14207,7 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.terminate_job_run + client._client._transport.create_rollout in client._client._transport._wrapped_methods ) @@ -14108,16 +14215,21 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.terminate_job_run + client._client._transport.create_rollout ] = mock_rpc request = {} - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.terminate_job_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14125,8 +14237,8 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_terminate_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest +async def test_create_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14138,47 +14250,43 @@ async def test_terminate_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run(request) + response = await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_terminate_job_run_async_from_dict(): - await test_terminate_job_run_async(request_type=dict) +async def test_create_rollout_async_from_dict(): + await test_create_rollout_async(request_type=dict) -def test_terminate_job_run_field_headers(): +def test_create_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: - call.return_value = cloud_deploy.TerminateJobRunResponse() - client.terminate_job_run(request) + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14189,30 +14297,28 @@ def test_terminate_job_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_terminate_job_run_field_headers_async(): +async def test_create_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14223,37 +14329,43 @@ async def test_terminate_job_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_terminate_job_run_flattened(): +def test_create_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.terminate_job_run( - name="name_value", + client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val -def test_terminate_job_run_flattened_error(): +def test_create_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14261,45 +14373,53 @@ def test_terminate_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.asyncio -async def test_terminate_job_run_flattened_async(): +async def test_create_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.terminate_job_run( - name="name_value", + response = await client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_terminate_job_run_flattened_error_async(): +async def test_create_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14307,20 +14427,22 @@ async def test_terminate_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + await client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetConfigRequest, + cloud_deploy.IgnoreJobRequest, dict, ], ) -def test_get_config(request_type, transport: str = "grpc"): +def test_ignore_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14331,27 +14453,22 @@ def test_get_config(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) - response = client.get_config(request) + call.return_value = cloud_deploy.IgnoreJobResponse() + response = client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) -def test_get_config_empty_call(): +def test_ignore_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14360,17 +14477,17 @@ def test_get_config_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_config() + client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() -def test_get_config_non_empty_request_with_auto_populated_field(): +def test_ignore_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14381,24 +14498,28 @@ def test_get_config_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetConfigRequest( - name="name_value", + request = cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_config(request=request) + client.ignore_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest( - name="name_value", + assert args[0] == cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_get_config_use_cached_wrapped_rpc(): +def test_ignore_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14412,21 +14533,21 @@ def test_get_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_config in client._transport._wrapped_methods + assert client._transport.ignore_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_config] = mock_rpc + client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc request = {} - client.get_config(request) + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_config(request) + client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14434,7 +14555,7 @@ def test_get_config_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_config_empty_call_async(): +async def test_ignore_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14443,22 +14564,19 @@ async def test_get_config_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config() + response = await client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() @pytest.mark.asyncio -async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14473,7 +14591,7 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.get_config + client._client._transport.ignore_job in client._client._transport._wrapped_methods ) @@ -14481,16 +14599,16 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_config + client._client._transport.ignore_job ] = mock_rpc request = {} - await client.get_config(request) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_config(request) + await client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14498,8 +14616,8 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_get_config_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest +async def test_ignore_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14511,48 +14629,43 @@ async def test_get_config_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config(request) + response = await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) @pytest.mark.asyncio -async def test_get_config_async_from_dict(): - await test_get_config_async(request_type=dict) +async def test_ignore_job_async_from_dict(): + await test_ignore_job_async(request_type=dict) -def test_get_config_field_headers(): +def test_ignore_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = cloud_deploy.Config() - client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = cloud_deploy.IgnoreJobResponse() + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14563,26 +14676,28 @@ def test_get_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_config_field_headers_async(): +async def test_ignore_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) - await client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14593,35 +14708,43 @@ async def test_get_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_get_config_flattened(): +def test_ignore_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_config( - name="name_value", + client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_get_config_flattened_error(): +def test_ignore_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14629,41 +14752,53 @@ def test_get_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_get_config_flattened_async(): +async def test_ignore_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_config( - name="name_value", + response = await client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_config_flattened_error_async(): +async def test_ignore_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14671,20 +14806,22 @@ async def test_get_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + await client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateAutomationRequest, + cloud_deploy.RetryJobRequest, dict, ], ) -def test_create_automation(request_type, transport: str = "grpc"): +def test_retry_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14695,24 +14832,22 @@ def test_create_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_automation(request) + call.return_value = cloud_deploy.RetryJobResponse() + response = client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) -def test_create_automation_empty_call(): +def test_retry_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14721,19 +14856,17 @@ def test_create_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_automation() + client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() -def test_create_automation_non_empty_request_with_auto_populated_field(): +def test_retry_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14744,30 +14877,28 @@ def test_create_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + request = cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_automation(request=request) + client.retry_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + assert args[0] == cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_create_automation_use_cached_wrapped_rpc(): +def test_retry_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14781,28 +14912,21 @@ def test_create_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_automation in client._transport._wrapped_methods + assert client._transport.retry_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc request = {} - client.create_automation(request) + client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_automation(request) + client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14810,7 +14934,7 @@ def test_create_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_automation_empty_call_async(): +async def test_retry_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14819,23 +14943,19 @@ async def test_create_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation() + response = await client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() @pytest.mark.asyncio -async def test_create_automation_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14850,7 +14970,7 @@ async def test_create_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_automation + client._client._transport.retry_job in client._client._transport._wrapped_methods ) @@ -14858,21 +14978,16 @@ async def test_create_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_automation + client._client._transport.retry_job ] = mock_rpc request = {} - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_automation(request) + await client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14880,8 +14995,8 @@ async def test_create_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest +async def test_retry_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14893,47 +15008,43 @@ async def test_create_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation(request) + response = await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) @pytest.mark.asyncio -async def test_create_automation_async_from_dict(): - await test_create_automation_async(request_type=dict) +async def test_retry_job_async_from_dict(): + await test_retry_job_async(request_type=dict) -def test_create_automation_field_headers(): +def test_retry_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_automation(request) + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + call.return_value = cloud_deploy.RetryJobResponse() + client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14944,30 +15055,28 @@ def test_create_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_automation_field_headers_async(): +async def test_retry_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.RetryJobResponse() ) - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14978,45 +15087,43 @@ async def test_create_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_create_automation_flattened(): +def test_retry_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
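# --- Illustrative sketch (not part of the generated patch) ---------------
# The retry_job field-header tests above verify that URI-bound request
# fields are echoed into the x-goog-request-params routing header. A
# minimal standalone version of that check, assuming the same deploy_v1
# imports used by the generated tests:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployClient
from google.cloud.deploy_v1.types import cloud_deploy


def sketch_retry_job_routing_header():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials())
    request = cloud_deploy.RetryJobRequest(rollout="rollout_value")
    with mock.patch.object(type(client.transport.retry_job), "__call__") as call:
        call.return_value = cloud_deploy.RetryJobResponse()
        client.retry_job(request)
        # The client layer adds routing metadata derived from request.rollout.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "rollout=rollout_value") in kw["metadata"]
# -------------------------------------------------------------------------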
- client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_create_automation_flattened_error(): +def test_retry_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15024,55 +15131,53 @@ def test_create_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_create_automation_flattened_async(): +async def test_retry_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + response = await client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_automation_flattened_error_async(): +async def test_retry_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15080,22 +15185,22 @@ async def test_create_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + await client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateAutomationRequest, + cloud_deploy.ListJobRunsRequest, dict, ], ) -def test_update_automation(request_type, transport: str = "grpc"): +def test_list_job_runs(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15106,24 +15211,27 @@ def test_update_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_automation(request) + call.return_value = cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_automation_empty_call(): +def test_list_job_runs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15132,19 +15240,17 @@ def test_update_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_automation() + client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() -def test_update_automation_non_empty_request_with_auto_populated_field(): +def test_list_job_runs_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15155,26 +15261,30 @@ def test_update_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + request = cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_automation(request=request) + client.list_job_runs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + assert args[0] == cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_update_automation_use_cached_wrapped_rpc(): +def test_list_job_runs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15188,28 +15298,21 @@ def test_update_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_automation in client._transport._wrapped_methods + assert client._transport.list_job_runs in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc request = {} - client.update_automation(request) + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_automation(request) + client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15217,7 +15320,7 @@ def test_update_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_automation_empty_call_async(): +async def test_list_job_runs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15226,21 +15329,22 @@ async def test_update_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation() + response = await client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() @pytest.mark.asyncio -async def test_update_automation_async_use_cached_wrapped_rpc( +async def test_list_job_runs_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15257,7 +15361,7 @@ async def test_update_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_automation + client._client._transport.list_job_runs in client._client._transport._wrapped_methods ) @@ -15265,21 +15369,16 @@ async def test_update_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_automation + client._client._transport.list_job_runs ] = mock_rpc request = {} - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_automation(request) + await client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15287,8 +15386,8 @@ async def test_update_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest +async def test_list_job_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15300,47 +15399,48 @@ async def test_update_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation(request) + response = await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_automation_async_from_dict(): - await test_update_automation_async(request_type=dict) +async def test_list_job_runs_async_from_dict(): + await test_list_job_runs_async(request_type=dict) -def test_update_automation_field_headers(): +def test_list_job_runs_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_automation(request) + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + call.return_value = cloud_deploy.ListJobRunsResponse() + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15351,30 +15451,28 @@ def test_update_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_automation_field_headers_async(): +async def test_list_job_runs_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ListJobRunsResponse() ) - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -15385,41 +15483,35 @@ async def test_update_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_automation_flattened(): +def test_list_job_runs_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_automation_flattened_error(): +def test_list_job_runs_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15427,50 +15519,43 @@ def test_update_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_automation_flattened_async(): +async def test_list_job_runs_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_automation_flattened_error_async(): +async def test_list_job_runs_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15478,21 +15563,214 @@ async def test_update_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", + ) + + +def test_list_job_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_job_runs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + + +def test_list_job_runs_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_job_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteAutomationRequest, + cloud_deploy.GetJobRunRequest, dict, ], ) -def test_delete_automation(request_type, transport: str = "grpc"): +def test_get_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15503,24 +15781,35 @@ def test_delete_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_automation(request) + call.return_value = cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) + response = client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" -def test_delete_automation_empty_call(): +def test_get_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15529,19 +15818,17 @@ def test_delete_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
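# --- Illustrative sketch (not part of the generated patch) ---------------
# The list_job_runs pager tests above feed the mocked stub a series of
# ListJobRunsResponse pages and iterate the returned pager. A minimal
# standalone version of that flow, assuming the deploy_v1 imports used by
# the generated tests:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployClient
from google.cloud.deploy_v1.types import cloud_deploy


def sketch_list_job_runs_pager():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call:
        # Two pages: the first advertises a next_page_token, the second ends
        # pagination by leaving it empty.
        call.side_effect = (
            cloud_deploy.ListJobRunsResponse(
                job_runs=[cloud_deploy.JobRun(), cloud_deploy.JobRun()],
                next_page_token="abc",
            ),
            cloud_deploy.ListJobRunsResponse(
                job_runs=[cloud_deploy.JobRun()],
            ),
        )
        pager = client.list_job_runs(parent="parent_value")
        # Iterating the pager fetches follow-up pages transparently and
        # yields individual JobRun messages.
        results = list(pager)
        assert len(results) == 3
        assert all(isinstance(item, cloud_deploy.JobRun) for item in results)
# -------------------------------------------------------------------------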
) - client.delete_automation() + client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() -def test_delete_automation_non_empty_request_with_auto_populated_field(): +def test_get_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15552,30 +15839,24 @@ def test_delete_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.DeleteAutomationRequest( + request = cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_automation(request=request) + client.get_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest( + assert args[0] == cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) -def test_delete_automation_use_cached_wrapped_rpc(): +def test_get_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15589,28 +15870,21 @@ def test_delete_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_automation in client._transport._wrapped_methods + assert client._transport.get_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc request = {} - client.delete_automation(request) + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_automation(request) + client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15618,7 +15892,7 @@ def test_delete_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_automation_empty_call_async(): +async def test_get_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -15627,21 +15901,26 @@ async def test_delete_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation() + response = await client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() @pytest.mark.asyncio -async def test_delete_automation_async_use_cached_wrapped_rpc( +async def test_get_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15658,7 +15937,7 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_automation + client._client._transport.get_job_run in client._client._transport._wrapped_methods ) @@ -15666,21 +15945,16 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_automation + client._client._transport.get_job_run ] = mock_rpc request = {} - await client.delete_automation(request) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_automation(request) + await client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15688,8 +15962,8 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest +async def test_get_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15701,47 +15975,56 @@ async def test_delete_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation(request) + response = await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" @pytest.mark.asyncio -async def test_delete_automation_async_from_dict(): - await test_delete_automation_async(request_type=dict) +async def test_get_job_run_async_from_dict(): + await test_get_job_run_async(request_type=dict) -def test_delete_automation_field_headers(): +def test_get_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = cloud_deploy.JobRun() + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15757,25 +16040,21 @@ def test_delete_automation_field_headers(): @pytest.mark.asyncio -async def test_delete_automation_field_headers_async(): +async def test_get_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -15790,20 +16069,18 @@ async def test_delete_automation_field_headers_async(): ) in kw["metadata"] -def test_delete_automation_flattened(): +def test_get_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_automation( + client.get_job_run( name="name_value", ) @@ -15816,7 +16093,7 @@ def test_delete_automation_flattened(): assert arg == mock_val -def test_delete_automation_flattened_error(): +def test_get_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15824,31 +16101,27 @@ def test_delete_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_automation_flattened_async(): +async def test_get_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_automation( + response = await client.get_job_run( name="name_value", ) @@ -15862,7 +16135,7 @@ async def test_delete_automation_flattened_async(): @pytest.mark.asyncio -async def test_delete_automation_flattened_error_async(): +async def test_get_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15870,8 +16143,8 @@ async def test_delete_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
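# --- Illustrative sketch (not part of the generated patch) ---------------
# The get_job_run hunks above replace the long-running delete_automation
# operation tests with unary get_job_run tests that assert on the returned
# JobRun fields. A minimal standalone version of that call, assuming the
# deploy_v1 imports used by the generated tests:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployClient
from google.cloud.deploy_v1.types import cloud_deploy


def sketch_get_job_run():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.get_job_run), "__call__") as call:
        call.return_value = cloud_deploy.JobRun(
            name="name_value",
            phase_id="phase_id_value",
            state=cloud_deploy.JobRun.State.IN_PROGRESS,
        )
        response = client.get_job_run(name="name_value")
        assert isinstance(response, cloud_deploy.JobRun)
        assert response.phase_id == "phase_id_value"
        assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS
# -------------------------------------------------------------------------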
with pytest.raises(ValueError): - await client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + await client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @@ -15879,11 +16152,11 @@ async def test_delete_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetAutomationRequest, + cloud_deploy.TerminateJobRunRequest, dict, ], ) -def test_get_automation(request_type, transport: str = "grpc"): +def test_terminate_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15894,35 +16167,24 @@ def test_get_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) - response = client.get_automation(request) + call.return_value = cloud_deploy.TerminateJobRunResponse() + response = client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) -def test_get_automation_empty_call(): +def test_terminate_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15931,17 +16193,19 @@ def test_get_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_automation() + client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() -def test_get_automation_non_empty_request_with_auto_populated_field(): +def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -15952,24 +16216,26 @@ def test_get_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetAutomationRequest( + request = cloud_deploy.TerminateJobRunRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_automation(request=request) + client.terminate_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest( + assert args[0] == cloud_deploy.TerminateJobRunRequest( name="name_value", ) -def test_get_automation_use_cached_wrapped_rpc(): +def test_terminate_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15983,21 +16249,23 @@ def test_get_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_automation in client._transport._wrapped_methods + assert client._transport.terminate_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc + client._transport._wrapped_methods[ + client._transport.terminate_job_run + ] = mock_rpc request = {} - client.get_automation(request) + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_automation(request) + client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16005,7 +16273,7 @@ def test_get_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_empty_call_async(): +async def test_terminate_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -16014,26 +16282,21 @@ async def test_get_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation() + response = await client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() @pytest.mark.asyncio -async def test_get_automation_async_use_cached_wrapped_rpc( +async def test_terminate_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16050,7 +16313,7 @@ async def test_get_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_automation + client._client._transport.terminate_job_run in client._client._transport._wrapped_methods ) @@ -16058,16 +16321,16 @@ async def test_get_automation_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation + client._client._transport.terminate_job_run ] = mock_rpc request = {} - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16075,8 +16338,8 @@ async def test_get_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +async def test_terminate_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16088,56 +16351,47 @@ async def test_get_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation(request) + response = await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) @pytest.mark.asyncio -async def test_get_automation_async_from_dict(): - await test_get_automation_async(request_type=dict) +async def test_terminate_job_run_async_from_dict(): + await test_terminate_job_run_async(request_type=dict) -def test_get_automation_field_headers(): +def test_terminate_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: - call.return_value = cloud_deploy.Automation() - client.get_automation(request) + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: + call.return_value = cloud_deploy.TerminateJobRunResponse() + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16153,23 +16407,25 @@ def test_get_automation_field_headers(): @pytest.mark.asyncio -async def test_get_automation_field_headers_async(): +async def test_terminate_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16184,18 +16440,20 @@ async def test_get_automation_field_headers_async(): ) in kw["metadata"] -def test_get_automation_flattened(): +def test_terminate_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_automation( + client.terminate_job_run( name="name_value", ) @@ -16208,7 +16466,7 @@ def test_get_automation_flattened(): assert arg == mock_val -def test_get_automation_flattened_error(): +def test_terminate_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16216,29 +16474,31 @@ def test_get_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_automation( - cloud_deploy.GetAutomationRequest(), + client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_automation_flattened_async(): +async def test_terminate_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_automation( + response = await client.terminate_job_run( name="name_value", ) @@ -16252,7 +16512,7 @@ async def test_get_automation_flattened_async(): @pytest.mark.asyncio -async def test_get_automation_flattened_error_async(): +async def test_terminate_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16260,8 +16520,8 @@ async def test_get_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_automation( - cloud_deploy.GetAutomationRequest(), + await client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @@ -16269,11 +16529,11 @@ async def test_get_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationsRequest, + cloud_deploy.GetConfigRequest, dict, ], ) -def test_list_automations(request_type, transport: str = "grpc"): +def test_get_config(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16284,27 +16544,27 @@ def test_list_automations(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) - response = client.list_automations(request) + response = client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" -def test_list_automations_empty_call(): +def test_get_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -16313,17 +16573,17 @@ def test_list_automations_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_automations() + client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() -def test_list_automations_non_empty_request_with_auto_populated_field(): +def test_get_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16334,30 +16594,24 @@ def test_list_automations_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.GetConfigRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automations(request=request) + client.get_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.GetConfigRequest( + name="name_value", ) -def test_list_automations_use_cached_wrapped_rpc(): +def test_get_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16371,23 +16625,21 @@ def test_list_automations_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_automations in client._transport._wrapped_methods + assert client._transport.get_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_automations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_config] = mock_rpc request = {} - client.list_automations(request) + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automations(request) + client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16395,7 +16647,7 @@ def test_list_automations_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automations_empty_call_async(): +async def test_get_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -16404,24 +16656,22 @@ async def test_list_automations_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations() + response = await client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() @pytest.mark.asyncio -async def test_list_automations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -16436,7 +16686,7 @@ async def test_list_automations_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automations + client._client._transport.get_config in client._client._transport._wrapped_methods ) @@ -16444,16 +16694,16 @@ async def test_list_automations_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_automations + client._client._transport.get_config ] = mock_rpc request = {} - await client.list_automations(request) + await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_automations(request) + await client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16461,8 +16711,8 @@ async def test_list_automations_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_automations_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +async def test_get_config_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16474,48 +16724,48 @@ async def test_list_automations_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations(request) + response = await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAutomationsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" @pytest.mark.asyncio -async def test_list_automations_async_from_dict(): - await test_list_automations_async(request_type=dict) +async def test_get_config_async_from_dict(): + await test_get_config_async(request_type=dict) -def test_list_automations_field_headers(): +def test_get_config_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = cloud_deploy.ListAutomationsResponse() - client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = cloud_deploy.Config() + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16526,28 +16776,26 @@ def test_list_automations_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automations_field_headers_async(): +async def test_get_config_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) - await client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16558,35 +16806,35 @@ async def test_list_automations_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_automations_flattened(): +def test_get_config_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automations( - parent="parent_value", + client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_automations_flattened_error(): +def test_get_config_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16594,43 +16842,41 @@ def test_list_automations_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", + client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_automations_flattened_async(): +async def test_get_config_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_automations( - parent="parent_value", + response = await client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_automations_flattened_error_async(): +async def test_get_config_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16638,214 +16884,20 @@ async def test_list_automations_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", - ) - - -def test_list_automations_pager(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in results) - - -def test_list_automations_pages(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_automations(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_automations_async_pager(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automations_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, + await client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetAutomationRunRequest, + cloud_deploy.CreateAutomationRequest, dict, ], ) -def test_get_automation_run(request_type, transport: str = "grpc"): +def test_create_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16857,40 +16909,23 @@ def test_get_automation_run(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) - response = client.get_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" + assert isinstance(response, future.Future) -def test_get_automation_run_empty_call(): +def test_create_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -16900,18 +16935,18 @@ def test_get_automation_run_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_automation_run() + client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() -def test_get_automation_run_non_empty_request_with_auto_populated_field(): +def test_create_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16922,26 +16957,30 @@ def test_get_automation_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetAutomationRunRequest( - name="name_value", + request = cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation_run(request=request) + client.create_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) -def test_get_automation_run_use_cached_wrapped_rpc(): +def test_create_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16955,9 +16994,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_automation_run in client._transport._wrapped_methods - ) + assert client._transport.create_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16965,15 +17002,20 @@ def test_get_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_automation_run + client._transport.create_automation ] = mock_rpc request = {} - client.get_automation_run(request) + client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16981,7 +17023,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_run_empty_call_async(): +async def test_create_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -16991,29 +17033,20 @@ async def test_get_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run() + response = await client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() @pytest.mark.asyncio -async def test_get_automation_run_async_use_cached_wrapped_rpc( +async def test_create_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17030,7 +17063,7 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_automation_run + client._client._transport.create_automation in client._client._transport._wrapped_methods ) @@ -17038,16 +17071,21 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation_run + client._client._transport.create_automation ] = mock_rpc request = {} - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17055,8 +17093,8 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_automation_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +async def test_create_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17069,63 +17107,46 @@ async def test_get_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run(request) + response = await client.create_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_automation_run_async_from_dict(): - await test_get_automation_run_async(request_type=dict) +async def test_create_automation_async_from_dict(): + await test_create_automation_async(request_type=dict) -def test_get_automation_run_field_headers(): +def test_create_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: - call.return_value = cloud_deploy.AutomationRun() - client.get_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17136,30 +17157,30 @@ def test_get_automation_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_automation_run_field_headers_async(): +async def test_create_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/op") ) - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -17170,37 +17191,45 @@ async def test_get_automation_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_automation_run_flattened(): +def test_create_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_automation_run( - name="name_value", + client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val -def test_get_automation_run_flattened_error(): +def test_create_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17208,45 +17237,55 @@ def test_get_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.asyncio -async def test_get_automation_run_flattened_async(): +async def test_create_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_automation_run( - name="name_value", + response = await client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_automation_run_flattened_error_async(): +async def test_create_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17254,20 +17293,22 @@ async def test_get_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + await client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationRunsRequest, + cloud_deploy.UpdateAutomationRequest, dict, ], ) -def test_list_automation_runs(request_type, transport: str = "grpc"): +def test_update_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17279,28 +17320,23 @@ def test_list_automation_runs(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationRunsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_automation_runs_empty_call(): +def test_update_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -17310,18 +17346,18 @@ def test_list_automation_runs_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automation_runs() + client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() -def test_list_automation_runs_non_empty_request_with_auto_populated_field(): +def test_update_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -17332,32 +17368,26 @@ def test_list_automation_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_automation_runs(request=request) + client.update_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) -def test_list_automation_runs_use_cached_wrapped_rpc(): +def test_update_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17371,9 +17401,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_automation_runs in client._transport._wrapped_methods - ) + assert client._transport.update_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17381,15 +17409,20 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_automation_runs + client._transport.update_automation ] = mock_rpc request = {} - client.list_automation_runs(request) + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automation_runs(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17397,7 +17430,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automation_runs_empty_call_async(): +async def test_update_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17407,23 +17440,20 @@ async def test_list_automation_runs_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs() + response = await client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() @pytest.mark.asyncio -async def test_list_automation_runs_async_use_cached_wrapped_rpc( +async def test_update_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17440,7 +17470,7 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automation_runs + client._client._transport.update_automation in client._client._transport._wrapped_methods ) @@ -17448,16 +17478,21 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_automation_runs + client._client._transport.update_automation ] = mock_rpc request = {} - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_automation_runs(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17465,8 +17500,8 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_automation_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +async def test_update_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17479,51 +17514,46 @@ async def test_list_automation_runs_async( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs(request) + response = await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationRunsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_automation_runs_async_from_dict(): - await test_list_automation_runs_async(request_type=dict) +async def test_update_automation_async_from_dict(): + await test_update_automation_async(request_type=dict) -def test_list_automation_runs_field_headers(): +def test_update_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: - call.return_value = cloud_deploy.ListAutomationRunsResponse() - client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17534,30 +17564,30 @@ def test_list_automation_runs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automation_runs_field_headers_async(): +async def test_update_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -17568,37 +17598,41 @@ async def test_list_automation_runs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] -def test_list_automation_runs_flattened(): +def test_update_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automation_runs( - parent="parent_value", + client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_automation_runs_flattened_error(): +def test_update_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17606,45 +17640,50 @@ def test_list_automation_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", + client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_automation_runs_flattened_async(): +async def test_update_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_automation_runs( - parent="parent_value", + response = await client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_automation_runs_flattened_error_async(): +async def test_update_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17652,271 +17691,70 @@ async def test_list_automation_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", + await client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_automation_runs_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteAutomationRequest, + dict, + ], +) +def test_delete_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automation_runs(request={}, retry=retry, timeout=timeout) + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_automation(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.DeleteAutomationRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_automation_runs_pages(transport_name: str = "grpc"): +def test_delete_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_automation_runs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_automation_runs_async_pager(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automation_runs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automation_runs_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automation_runs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.CancelAutomationRunRequest, - dict, - ], -) -def test_cancel_automation_run(request_type, transport: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() - response = client.cancel_automation_run(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) - - -def test_cancel_automation_run_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.cancel_automation_run() + client.delete_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() + assert args[0] == cloud_deploy.DeleteAutomationRequest() -def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): +def test_delete_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -17927,26 +17765,30 @@ def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelAutomationRunRequest( + request = cloud_deploy.DeleteAutomationRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_automation_run(request=request) + client.delete_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest( + assert args[0] == cloud_deploy.DeleteAutomationRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_automation_run_use_cached_wrapped_rpc(): +def test_delete_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17960,10 +17802,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.cancel_automation_run - in client._transport._wrapped_methods - ) + assert client._transport.delete_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17971,15 +17810,20 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.cancel_automation_run + client._transport.delete_automation ] = mock_rpc request = {} - client.cancel_automation_run(request) + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.cancel_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17987,7 +17831,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_automation_run_empty_call_async(): +async def test_delete_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17997,20 +17841,20 @@ async def test_cancel_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run() + response = await client.delete_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() + assert args[0] == cloud_deploy.DeleteAutomationRequest() @pytest.mark.asyncio -async def test_cancel_automation_run_async_use_cached_wrapped_rpc( +async def test_delete_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -18027,7 +17871,7 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_automation_run + client._client._transport.delete_automation in client._client._transport._wrapped_methods ) @@ -18035,16 +17879,21 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_automation_run + client._client._transport.delete_automation ] = mock_rpc request = {} - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.cancel_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -18052,9 +17901,8 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_cancel_automation_run_async( - transport: str = "grpc_asyncio", - request_type=cloud_deploy.CancelAutomationRunRequest, +async def test_delete_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18067,46 +17915,46 @@ async def test_cancel_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run(request) + response = await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_automation_run_async_from_dict(): - await test_cancel_automation_run_async(request_type=dict) +async def test_delete_automation_async_from_dict(): + await test_delete_automation_async(request_type=dict) -def test_cancel_automation_run_field_headers(): +def test_delete_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - call.return_value = cloud_deploy.CancelAutomationRunResponse() - client.cancel_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -18122,25 +17970,25 @@ def test_cancel_automation_run_field_headers(): @pytest.mark.asyncio -async def test_cancel_automation_run_field_headers_async(): +async def test_delete_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -18155,20 +18003,20 @@ async def test_cancel_automation_run_field_headers_async(): ) in kw["metadata"] -def test_cancel_automation_run_flattened(): +def test_delete_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_automation_run( + client.delete_automation( name="name_value", ) @@ -18181,7 +18029,7 @@ def test_cancel_automation_run_flattened(): assert arg == mock_val -def test_cancel_automation_run_flattened_error(): +def test_delete_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18189,31 +18037,31 @@ def test_cancel_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_async(): +async def test_delete_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_automation_run( + response = await client.delete_automation( name="name_value", ) @@ -18227,7 +18075,7 @@ async def test_cancel_automation_run_flattened_async(): @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_error_async(): +async def test_delete_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18235,8 +18083,8 @@ async def test_cancel_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + await client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @@ -18244,52 +18092,103 @@ async def test_cancel_automation_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListDeliveryPipelinesRequest, + cloud_deploy.GetAutomationRequest, dict, ], ) -def test_list_delivery_pipelines_rest(request_type): +def test_get_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", ) + response = client.get_automation(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_delivery_pipelines(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeliveryPipelinesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" -def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): +def test_get_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() + + +def test_get_automation_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + +def test_get_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -18297,137 +18196,4184 @@ def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_delivery_pipelines - in client._transport._wrapped_methods - ) + assert client._transport.get_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_delivery_pipelines - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc request = {} - client.list_delivery_pipelines(request) + client.get_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_delivery_pipelines(request) + client.get_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_delivery_pipelines_rest_required_fields( - request_type=cloud_deploy.ListDeliveryPipelinesRequest, -): - transport_class = transports.CloudDeployRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_get_automation_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_get_automation_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.get_automation + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation + ] = mock_rpc - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + request = {} + await client.get_automation(request) - # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - response_value = Response() - response_value.status_code = 200 + await client.get_automation(request) - # Convert return value to protobuf type + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_get_automation_async_from_dict(): + await test_get_automation_async(request_type=dict) + + +def test_get_automation_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = cloud_deploy.Automation() + client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationsRequest, + dict, + ], +) +def test_list_automations(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +def test_list_automations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_automations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_automations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automations + ] = mock_rpc + request = {} + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +@pytest.mark.asyncio +async def test_list_automations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_automations + ] = mock_rpc + + request = {} + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automations_async_from_dict(): + await test_list_automations_async(request_type=dict) + + +def test_list_automations_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = cloud_deploy.ListAutomationsResponse() + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automations_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automations_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automations_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automations_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automations_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +def test_list_automations_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automations(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in results) + + +def test_list_automations_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automations_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automations_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetAutomationRunRequest, + dict, + ], +) +def test_get_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + response = client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +def test_get_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +def test_get_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + +def test_get_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_automation_run in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_automation_run + ] = mock_rpc + request = {} + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_get_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation_run + ] = mock_rpc + + request = {} + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +@pytest.mark.asyncio +async def test_get_automation_run_async_from_dict(): + await test_get_automation_run_async(request_type=dict) + + +def test_get_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.AutomationRun() + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationRunsRequest, + dict, + ], +) +def test_list_automation_runs(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutomationRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automation_runs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +def test_list_automation_runs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automation_runs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automation_runs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_automation_runs in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automation_runs + ] = mock_rpc + request = {} + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automation_runs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_automation_runs + ] = mock_rpc + + request = {} + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutomationRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_from_dict(): + await test_list_automation_runs_async(request_type=dict) + + +def test_list_automation_runs_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = cloud_deploy.ListAutomationRunsResponse() + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automation_runs_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automation_runs_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +def test_list_automation_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automation_runs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) + + +def test_list_automation_runs_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automation_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automation_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automation_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CancelAutomationRunRequest, + dict, + ], +) +def test_cancel_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + response = client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +def test_cancel_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + +def test_cancel_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.cancel_automation_run + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_automation_run + ] = mock_rpc + request = {} + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_automation_run + ] = mock_rpc + + request = {} + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async( + transport: str = "grpc_asyncio", + request_type=cloud_deploy.CancelAutomationRunRequest, +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_from_dict(): + await test_cancel_automation_run_async(request_type=dict) + + +def test_cancel_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.CancelAutomationRunResponse() + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_cancel_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeliveryPipelinesRequest, + dict, + ], +) +def test_list_delivery_pipelines_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_delivery_pipelines(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeliveryPipelinesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_delivery_pipelines + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_delivery_pipelines + ] = mock_rpc + + request = {} + client.list_delivery_pipelines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_delivery_pipelines(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_delivery_pipelines_rest_required_fields( + request_type=cloud_deploy.ListDeliveryPipelinesRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + +
# Convert return value to protobuf type
return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
- response = client.list_delivery_pipelines(request)
+ response = client.list_delivery_pipelines(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_delivery_pipelines_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_delivery_pipelines_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( + cloud_deploy.ListDeliveryPipelinesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( + cloud_deploy.ListDeliveryPipelinesResponse() + ) + + request = cloud_deploy.ListDeliveryPipelinesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + client.list_delivery_pipelines( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_delivery_pipelines_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_delivery_pipelines(request) + + +def test_list_delivery_pipelines_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_delivery_pipelines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_delivery_pipelines( + cloud_deploy.ListDeliveryPipelinesRequest(), + parent="parent_value", + ) + + +def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[], + next_page_token="def", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_delivery_pipelines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) + + pages = list(client.list_delivery_pipelines(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetDeliveryPipelineRequest, + dict, + ], +) +def test_get_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_delivery_pipeline(request) + + # Establish that the response is the type that we expect. 
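# Simplified sketch of the pager pattern exercised above; `fetch_all_items`
# and the URL are hypothetical, not part of the library. The generated test
# queues several page payloads on Session.request via side_effect and checks
# that iterating the pager walks every item and every next_page_token. The
# same idea, reduced to plain requests + unittest.mock:
import json
from unittest import mock

import requests


def fetch_all_items(session, url):
    """Follow next_page_token links and collect every item."""
    items, token = [], ""
    while True:
        resp = session.request("GET", url, params={"page_token": token})
        payload = resp.json()
        items.extend(payload.get("items", []))
        token = payload.get("next_page_token", "")
        if not token:
            return items


def _demo_pager():
    pages = [
        {"items": [1, 2, 3], "next_page_token": "abc"},
        {"items": [], "next_page_token": "def"},
        {"items": [4], "next_page_token": "ghi"},
        {"items": [5, 6]},
    ]
    responses = []
    for page in pages:
        r = requests.Response()
        r.status_code = 200
        r._content = json.dumps(page).encode("utf-8")
        responses.append(r)
    session = mock.create_autospec(requests.Session, instance=True)
    session.request.side_effect = responses
    assert fetch_all_items(session, "https://example.com/v1/items") == [1, 2, 3, 4, 5, 6]


_demo_pager()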
+ assert isinstance(response, cloud_deploy.DeliveryPipeline) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + + +def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_delivery_pipeline + ] = mock_rpc + + request = {} + client.get_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.GetDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( + cloud_deploy.GetDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( + cloud_deploy.DeliveryPipeline() + ) + + request = cloud_deploy.GetDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.DeliveryPipeline() + + client.get_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_delivery_pipeline(request) + + +def test_get_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_delivery_pipeline( + cloud_deploy.GetDeliveryPipelineRequest(), + name="name_value", + ) + + +def test_get_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateDeliveryPipelineRequest, + dict, + ], +) +def test_create_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["delivery_pipeline"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": "pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_delivery_pipeline(request) + + # Establish that the response is the type that we expect. 
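# Illustrative sketch of the pruning logic above; `prune_unknown_subfields`
# and the dict layout are invented for demonstration. The generated loop drops
# any nested key in the hand-written sample request that the installed version
# of the message type does not define, so the test keeps passing when it runs
# against an older protobuf/proto-plus dependency than the one used at
# generation time. The same idea on plain dicts:
def prune_unknown_subfields(sample, known_nested_fields):
    """Remove (field, subfield) pairs not present in `known_nested_fields`."""
    for field, value in sample.items():
        entries = value if isinstance(value, list) else [value]
        for entry in entries:
            if isinstance(entry, dict):
                for subfield in list(entry):
                    if (field, subfield) not in known_nested_fields:
                        del entry[subfield]
    return sample


def _demo_prune():
    sample = {
        "serial_pipeline": {"stages": [], "retired_option": True},
        "condition": [{"status": True, "unknown_flag": 1}],
    }
    known = {("serial_pipeline", "stages"), ("condition", "status")}
    pruned = prune_unknown_subfields(sample, known)
    assert pruned == {"serial_pipeline": {"stages": []}, "condition": [{"status": True}]}


_demo_prune()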
+ assert response.operation.name == "operations/spam" + + +def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_delivery_pipeline + ] = mock_rpc + + request = {} + client.create_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.CreateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["delivery_pipeline_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "deliveryPipelineId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deliveryPipelineId" in jsonified_request + assert ( + jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "delivery_pipeline_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deliveryPipelineId" in jsonified_request + assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_delivery_pipeline(request) + + expected_params = [ + ( + "deliveryPipelineId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deliveryPipelineId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deliveryPipelineId", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( + cloud_deploy.CreateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +): + 
client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_delivery_pipeline(request) + + +def test_create_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_delivery_pipeline( + cloud_deploy.CreateDeliveryPipelineRequest(), + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + + +def test_create_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.UpdateDeliveryPipelineRequest, + dict, + ], +) +def test_update_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request_init["delivery_pipeline"] = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": "pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_delivery_pipeline(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_delivery_pipeline + ] = mock_rpc + + request = {} + client.update_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.UpdateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( + cloud_deploy.UpdateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.UpdateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
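# Hypothetical minimal example of the interceptor test shape above; these
# classes are not the CloudDeploy interceptor API. The generated tests patch
# the pre_*/post_* hooks, invoke the RPC once, and assert each hook ran
# exactly once:
from unittest import mock


class Interceptor:
    def pre_call(self, request, metadata):
        return request, metadata

    def post_call(self, response):
        return response


class TinyClient:
    def __init__(self, interceptor, transport):
        self._interceptor = interceptor
        self._transport = transport

    def get_thing(self, request, metadata=()):
        request, metadata = self._interceptor.pre_call(request, metadata)
        response = self._transport(request, metadata)
        return self._interceptor.post_call(response)


def _demo_interceptor_hooks():
    with mock.patch.object(Interceptor, "pre_call") as pre, mock.patch.object(
        Interceptor, "post_call"
    ) as post:
        pre.return_value = ({"name": "x"}, [("key", "val")])
        post.return_value = {"ok": True}
        client = TinyClient(Interceptor(), transport=lambda req, md: {"raw": True})
        assert client.get_thing({"name": "x"}, metadata=[("key", "val")]) == {"ok": True}
        pre.assert_called_once()
        post.assert_called_once()


_demo_interceptor_hooks()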
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_delivery_pipeline(request) + + +def test_update_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_delivery_pipeline( + cloud_deploy.UpdateDeliveryPipelineRequest(), + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteDeliveryPipelineRequest, + dict, + ], +) +def test_delete_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_delivery_pipeline + ] = mock_rpc + + request = {} + client.delete_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.DeleteDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_delivery_pipeline(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_delivery_pipelines_rest_unset_required_fields(): +def test_delete_delivery_pipeline_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", ) ) - & set(("parent",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_delivery_pipelines_rest_interceptors(null_interceptor): +def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18440,14 +22386,16 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( - cloud_deploy.ListDeliveryPipelinesRequest() + pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( + cloud_deploy.DeleteDeliveryPipelineRequest() ) transcode.return_value = { "method": "post", @@ -18459,19 +22407,19 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( - cloud_deploy.ListDeliveryPipelinesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListDeliveryPipelinesRequest() + request = cloud_deploy.DeleteDeliveryPipelineRequest() metadata = [ ("key", 
"val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + post.return_value = operations_pb2.Operation() - client.list_delivery_pipelines( + client.delete_delivery_pipeline( request, metadata=[ ("key", "val"), @@ -18483,8 +22431,8 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_delivery_pipelines_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +def test_delete_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18492,7 +22440,9 @@ def test_list_delivery_pipelines_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18504,10 +22454,10 @@ def test_list_delivery_pipelines_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_delivery_pipelines(request) + client.delete_delivery_pipeline(request) -def test_list_delivery_pipelines_rest_flattened(): +def test_delete_delivery_pipeline_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18516,40 +22466,40 @@ def test_list_delivery_pipelines_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_delivery_pipelines(**mock_args) + client.delete_delivery_pipeline(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" % client.transport._host, args[1], ) -def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): +def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18558,126 +22508,61 @@ def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_delivery_pipelines( - cloud_deploy.ListDeliveryPipelinesRequest(), - parent="parent_value", + client.delete_delivery_pipeline( + cloud_deploy.DeleteDeliveryPipelineRequest(), + name="name_value", ) -def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): +def test_delete_delivery_pipeline_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="abc", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[], - next_page_token="def", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_delivery_pipelines(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) - - pages = list(client.list_delivery_pipelines(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetDeliveryPipelineRequest, + cloud_deploy.ListTargetsRequest, dict, ], ) -def test_get_delivery_pipeline_rest(request_type): +def test_list_targets_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + 
request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, + return_value = cloud_deploy.ListTargetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.list_targets(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.DeliveryPipeline) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True + assert isinstance(response, pagers.ListTargetsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_list_targets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18691,40 +22576,35 @@ def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.list_targets in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc request = {} - client.get_delivery_pipeline(request) + client.list_targets(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_delivery_pipeline(request) + client.list_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.GetDeliveryPipelineRequest, +def test_list_targets_rest_required_fields( + request_type=cloud_deploy.ListTargetsRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18735,21 +22615,30 @@ def test_get_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18758,7 +22647,7 @@ def test_get_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline() + return_value = cloud_deploy.ListTargetsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18779,30 +22668,40 @@ def test_get_delivery_pipeline_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.list_targets(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_delivery_pipeline_rest_unset_required_fields(): +def test_list_targets_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_targets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_delivery_pipeline_rest_interceptors(null_interceptor): +def test_list_targets_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18815,14 +22714,14 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_list_targets" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_list_targets" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( - cloud_deploy.GetDeliveryPipelineRequest() + pb_message = cloud_deploy.ListTargetsRequest.pb( + cloud_deploy.ListTargetsRequest() ) transcode.return_value = { "method": "post", @@ -18834,19 +22733,19 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( - cloud_deploy.DeliveryPipeline() + req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( + cloud_deploy.ListTargetsResponse() ) - request = cloud_deploy.GetDeliveryPipelineRequest() + request = cloud_deploy.ListTargetsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.DeliveryPipeline() + post.return_value = cloud_deploy.ListTargetsResponse() - client.get_delivery_pipeline( + client.list_targets( request, metadata=[ ("key", "val"), @@ -18858,8 +22757,8 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +def test_list_targets_rest_bad_request( + transport: str = "rest", 
request_type=cloud_deploy.ListTargetsRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18867,9 +22766,7 @@ def test_get_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18881,10 +22778,10 @@ def test_get_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_delivery_pipeline(request) + client.list_targets(request) -def test_get_delivery_pipeline_rest_flattened(): +def test_list_targets_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18893,16 +22790,14 @@ def test_get_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline() + return_value = cloud_deploy.ListTargetsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -18910,25 +22805,24 @@ def test_get_delivery_pipeline_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_delivery_pipeline(**mock_args) + client.list_targets(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_list_targets_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18937,225 +22831,113 @@ def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_delivery_pipeline( - cloud_deploy.GetDeliveryPipelineRequest(), - name="name_value", + client.list_targets( + cloud_deploy.ListTargetsRequest(), + parent="parent_value", + ) + + +def test_list_targets_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + next_page_token="abc", + ), + cloud_deploy.ListTargetsResponse( + targets=[], + next_page_token="def", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + ), ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_get_delivery_pipeline_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pager = client.list_targets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Target) for i in results) + + pages = list(client.list_targets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateDeliveryPipelineRequest, + cloud_deploy.RollbackTargetRequest, dict, ], ) -def test_create_delivery_pipeline_rest(request_type): +def test_rollback_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["delivery_pipeline"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - 
"stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["delivery_pipeline"][field])): - del request_init["delivery_pipeline"][field][i][subfield] - else: - del request_init["delivery_pipeline"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.RollbackTargetResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.rollback_target(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.RollbackTargetResponse) -def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_rollback_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19169,45 +22951,37 @@ def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.rollback_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc request = {} - client.create_delivery_pipeline(request) + client.rollback_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_delivery_pipeline(request) + client.rollback_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.CreateDeliveryPipelineRequest, +def test_rollback_target_rest_required_fields( + request_type=cloud_deploy.RollbackTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["delivery_pipeline_id"] = "" + request_init["name"] = "" + request_init["target_id"] = "" + request_init["rollout_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19215,40 +22989,30 @@ def test_create_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped - assert "deliveryPipelineId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).rollback_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "deliveryPipelineId" in jsonified_request - assert ( - jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + jsonified_request["name"] = "name_value" + jsonified_request["targetId"] = "target_id_value" + jsonified_request["rolloutId"] = "rollout_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "delivery_pipeline_id", - "request_id", - "validate_only", - ) - ) + ).rollback_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "deliveryPipelineId" in jsonified_request - assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" + assert "rolloutId" in jsonified_request + assert jsonified_request["rolloutId"] == "rollout_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19257,7 +23021,7 @@ def test_create_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.RollbackTargetResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19277,50 +23041,41 @@ def test_create_delivery_pipeline_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.rollback_target(request) - expected_params = [ - ( - "deliveryPipelineId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_delivery_pipeline_rest_unset_required_fields(): +def test_rollback_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.rollback_target._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "deliveryPipelineId", - "requestId", - "validateOnly", - ) - ) + set(()) & set( ( - "parent", - "deliveryPipelineId", - "deliveryPipeline", + "name", + "targetId", + "rolloutId", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_delivery_pipeline_rest_interceptors(null_interceptor): +def test_rollback_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19333,16 +23088,14 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_rollback_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_rollback_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( - cloud_deploy.CreateDeliveryPipelineRequest() + pb_message = cloud_deploy.RollbackTargetRequest.pb( + cloud_deploy.RollbackTargetRequest() ) transcode.return_value = { "method": "post", @@ -19354,19 +23107,19 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( + cloud_deploy.RollbackTargetResponse() ) - request = cloud_deploy.CreateDeliveryPipelineRequest() + request = cloud_deploy.RollbackTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.RollbackTargetResponse() - client.create_delivery_pipeline( + client.rollback_target( request, metadata=[ ("key", "val"), @@ -19378,8 +23131,8 @@ def 
test_create_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +def test_rollback_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19387,7 +23140,9 @@ def test_create_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19399,285 +23154,129 @@ def test_create_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_delivery_pipeline(request) - - -def test_create_delivery_pipeline_rest_flattened(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_delivery_pipeline(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" - % client.transport._host, - args[1], - ) - - -def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_delivery_pipeline( - cloud_deploy.CreateDeliveryPipelineRequest(), - parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", - ) - - -def test_create_delivery_pipeline_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + client.rollback_target(request) -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.UpdateDeliveryPipelineRequest, - dict, - ], -) -def test_update_delivery_pipeline_rest(request_type): +def test_rollback_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } - request_init["delivery_pipeline"] = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + ) - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.RollbackTargetResponse() - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + mock_args.update(sample_request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.rollback_target(**mock_args) - subfields_not_in_runtime = [] + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" + % client.transport._host, + args[1], + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_rollback_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["delivery_pipeline"][field])): - del request_init["delivery_pipeline"][field][i][subfield] - else: - del request_init["delivery_pipeline"][field][subfield] + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback_target( + cloud_deploy.RollbackTargetRequest(), + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + + +def test_rollback_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetTargetRequest, + dict, + ], +) +def test_get_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target( + name="name_value", + target_id="target_id_value", + uid="uid_value", + description="description_value", + require_approval=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Target) + assert response.name == "name_value" + assert response.target_id == "target_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.require_approval is True + assert response.etag == "etag_value" -def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_get_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19691,43 +23290,33 @@ def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.get_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_target] = mock_rpc request = {} - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.UpdateDeliveryPipelineRequest, -): +def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19738,26 +23327,21 @@ def test_update_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "request_id", - "update_mask", - "validate_only", - ) - ) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19766,7 +23350,7 @@ def test_update_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19778,52 +23362,39 @@ def test_update_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_delivery_pipeline_rest_unset_required_fields(): +def test_get_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "requestId", - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "updateMask", - "deliveryPipeline", - ) - ) - ) + unset_fields = transport.get_target._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_delivery_pipeline_rest_interceptors(null_interceptor): +def test_get_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19836,17 +23407,13 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_get_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_get_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( - cloud_deploy.UpdateDeliveryPipelineRequest() - ) + pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19857,19 +23424,17 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) - request = cloud_deploy.UpdateDeliveryPipelineRequest() + request = cloud_deploy.GetTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.Target() - client.update_delivery_pipeline( + client.get_target( request, metadata=[ ("key", "val"), @@ -19881,8 +23446,8 @@ def 
test_update_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +def test_get_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19890,11 +23455,7 @@ def test_update_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19906,10 +23467,10 @@ def test_update_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_delivery_pipeline(request) + client.get_target(request) -def test_update_delivery_pipeline_rest_flattened(): +def test_get_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19918,43 +23479,39 @@ def test_update_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # get arguments that satisfy an http rule for this method - sample_request = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_delivery_pipeline(**mock_args) + client.get_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_get_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19963,14 +23520,13 @@ def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_delivery_pipeline( - cloud_deploy.UpdateDeliveryPipelineRequest(), - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_target( + cloud_deploy.GetTargetRequest(), + name="name_value", ) -def test_update_delivery_pipeline_rest_error(): +def test_get_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19979,20 +23535,126 @@ def test_update_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteDeliveryPipelineRequest, + cloud_deploy.CreateTargetRequest, dict, ], ) -def test_delete_delivery_pipeline_rest(request_type): +def test_create_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["target"] = { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20007,13 +23669,13 @@ def test_delete_delivery_pipeline_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_create_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20027,22 +23689,17 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.create_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_target] = mock_rpc request = {} - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -20051,20 +23708,21 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.DeleteDeliveryPipelineRequest, +def test_create_target_rest_required_fields( + request_type=cloud_deploy.CreateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["target_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20072,34 +23730,38 @@ def test_delete_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped + assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["targetId"] = "target_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "etag", - "force", "request_id", + "target_id", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20120,9 +23782,10 @@ def test_delete_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -20132,35 +23795,45 @@ def test_delete_delivery_pipeline_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "targetId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_delivery_pipeline_rest_unset_required_fields(): +def test_create_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.create_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "etag", - "force", "requestId", + "targetId", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "parent", + "targetId", + "target", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): +def test_create_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20175,14 +23848,14 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_create_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_create_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( - cloud_deploy.DeleteDeliveryPipelineRequest() + pb_message = cloud_deploy.CreateTargetRequest.pb( + cloud_deploy.CreateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20198,7 +23871,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteDeliveryPipelineRequest() + request = cloud_deploy.CreateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20206,7 +23879,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() - client.delete_delivery_pipeline( + client.create_target( request, metadata=[ ("key", "val"), @@ -20218,8 +23891,8 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest +def test_create_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20227,9 +23900,7 @@ def test_delete_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20241,10 +23912,10 @@ def test_delete_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_delivery_pipeline(request) + client.create_target(request) -def test_delete_delivery_pipeline_rest_flattened(): +def test_create_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20256,13 +23927,13 @@ def test_delete_delivery_pipeline_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) mock_args.update(sample_request) @@ -20273,20 +23944,19 @@ def test_delete_delivery_pipeline_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_delivery_pipeline(**mock_args) + client.create_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_create_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20295,13 +23965,15 @@ def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_delivery_pipeline( - cloud_deploy.DeleteDeliveryPipelineRequest(), - name="name_value", + client.create_target( + cloud_deploy.CreateTargetRequest(), + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) -def test_delete_delivery_pipeline_rest_error(): +def test_create_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20310,46 +23982,149 @@ def test_delete_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListTargetsRequest, + cloud_deploy.UpdateTargetRequest, dict, ], ) -def test_list_targets_rest(request_type): +def test_update_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } + request_init["target"] = { + "name": "projects/sample1/locations/sample2/targets/sample3", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTargetsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_targets_rest_use_cached_wrapped_rpc(): +def test_update_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20363,35 +24138,38 @@ def test_list_targets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_targets in client._transport._wrapped_methods + assert client._transport.update_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc + client._transport._wrapped_methods[client._transport.update_target] = mock_rpc request = {} - client.list_targets(request) + client.update_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_targets(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_targets_rest_required_fields( - request_type=cloud_deploy.ListTargetsRequest, +def test_update_target_rest_required_fields( + request_type=cloud_deploy.UpdateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20402,30 +24180,26 @@ def test_list_targets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "request_id", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20434,7 +24208,7 @@ def test_list_targets_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20446,49 +24220,52 @@ def test_list_targets_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_targets_rest_unset_required_fields(): +def test_update_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_targets._get_unset_required_fields({}) + unset_fields = transport.update_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "target", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_targets_rest_interceptors(null_interceptor): +def test_update_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20501,14 +24278,16 @@ def test_list_targets_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_targets" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_targets" + transports.CloudDeployRestInterceptor, "pre_update_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListTargetsRequest.pb( - cloud_deploy.ListTargetsRequest() + pb_message = cloud_deploy.UpdateTargetRequest.pb( + cloud_deploy.UpdateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20520,19 +24299,19 @@ def test_list_targets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( - cloud_deploy.ListTargetsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListTargetsRequest() + request = cloud_deploy.UpdateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListTargetsResponse() + post.return_value = operations_pb2.Operation() - 
client.list_targets( + client.update_target( request, metadata=[ ("key", "val"), @@ -20544,8 +24323,8 @@ def test_list_targets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_targets_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest +def test_update_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20553,7 +24332,9 @@ def test_list_targets_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20565,10 +24346,10 @@ def test_list_targets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_targets(request) + client.update_target(request) -def test_list_targets_rest_flattened(): +def test_update_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20577,39 +24358,41 @@ def test_list_targets_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_targets(**mock_args) + client.update_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{target.name=projects/*/locations/*/targets/*}" + % client.transport._host, args[1], ) -def test_list_targets_rest_flattened_error(transport: str = "rest"): +def test_update_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20618,113 +24401,55 @@ def test_list_targets_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_targets( - cloud_deploy.ListTargetsRequest(), - parent="parent_value", + client.update_target( + cloud_deploy.UpdateTargetRequest(), + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_targets_rest_pager(transport: str = "rest"): +def test_update_target_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - next_page_token="abc", - ), - cloud_deploy.ListTargetsResponse( - targets=[], - next_page_token="def", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_targets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Target) for i in results) - - pages = list(client.list_targets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RollbackTargetRequest, + cloud_deploy.DeleteTargetRequest, dict, ], ) -def test_rollback_target_rest(request_type): +def test_delete_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.RollbackTargetResponse) + assert response.operation.name == "operations/spam" -def test_rollback_target_rest_use_cached_wrapped_rpc(): +def test_delete_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20738,37 +24463,39 @@ def test_rollback_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.rollback_target in client._transport._wrapped_methods + assert client._transport.delete_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc request = {} - client.rollback_target(request) + client.delete_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.rollback_target(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_rollback_target_rest_required_fields( - request_type=cloud_deploy.RollbackTargetRequest, +def test_delete_target_rest_required_fields( + request_type=cloud_deploy.DeleteTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["name"] = "" - request_init["target_id"] = "" - request_init["rollout_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20779,27 +24506,30 @@ def test_rollback_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["targetId"] = "target_id_value" - jsonified_request["rolloutId"] = "rollout_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" - assert "rolloutId" in jsonified_request - assert jsonified_request["rolloutId"] == "rollout_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20808,7 +24538,7 @@ def test_rollback_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20820,49 +24550,46 @@ def test_rollback_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_rollback_target_rest_unset_required_fields(): +def test_delete_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.rollback_target._get_unset_required_fields({}) + unset_fields = transport.delete_target._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "name", - "targetId", - "rolloutId", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_target_rest_interceptors(null_interceptor): +def test_delete_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20875,14 +24602,16 @@ def test_rollback_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_rollback_target" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_rollback_target" + transports.CloudDeployRestInterceptor, "pre_delete_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.RollbackTargetRequest.pb( - cloud_deploy.RollbackTargetRequest() + pb_message = cloud_deploy.DeleteTargetRequest.pb( + cloud_deploy.DeleteTargetRequest() ) 
transcode.return_value = { "method": "post", @@ -20894,19 +24623,19 @@ def test_rollback_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( - cloud_deploy.RollbackTargetResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.RollbackTargetRequest() + request = cloud_deploy.DeleteTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.RollbackTargetResponse() + post.return_value = operations_pb2.Operation() - client.rollback_target( + client.delete_target( request, metadata=[ ("key", "val"), @@ -20918,8 +24647,8 @@ def test_rollback_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_rollback_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest +def test_delete_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20927,9 +24656,7 @@ def test_rollback_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20941,10 +24668,10 @@ def test_rollback_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.rollback_target(request) + client.delete_target(request) -def test_rollback_target_rest_flattened(): +def test_delete_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20953,44 +24680,37 @@ def test_rollback_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.rollback_target(**mock_args) + client.delete_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_rollback_target_rest_flattened_error(transport: str = "rest"): +def test_delete_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20999,15 +24719,13 @@ def test_rollback_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.rollback_target( - cloud_deploy.RollbackTargetRequest(), + client.delete_target( + cloud_deploy.DeleteTargetRequest(), name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) -def test_rollback_target_rest_error(): +def test_delete_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21016,54 +24734,46 @@ def test_rollback_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetTargetRequest, + cloud_deploy.ListCustomTargetTypesRequest, dict, ], ) -def test_get_target_rest(request_type): +def test_list_custom_target_types_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Target( - name="name_value", - target_id="target_id_value", - uid="uid_value", - description="description_value", - require_approval=True, - etag="etag_value", + return_value = cloud_deploy.ListCustomTargetTypesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Target) - assert response.name == "name_value" - assert response.target_id == "target_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.require_approval is True - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListCustomTargetTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_target_rest_use_cached_wrapped_rpc(): +def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21077,33 +24787,40 @@ def test_get_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_target in client._transport._wrapped_methods + assert ( + client._transport.list_custom_target_types + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_custom_target_types + ] = mock_rpc request = {} - client.get_target(request) + client.list_custom_target_types(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_target(request) + client.list_custom_target_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): +def test_list_custom_target_types_rest_required_fields( + request_type=cloud_deploy.ListCustomTargetTypesRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21114,21 +24831,30 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21137,7 +24863,7 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21158,30 +24884,40 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_target_rest_unset_required_fields(): +def test_list_custom_target_types_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_target._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_target_rest_interceptors(null_interceptor): +def test_list_custom_target_types_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21194,13 +24930,15 @@ def test_get_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_target" + transports.CloudDeployRestInterceptor, "post_list_custom_target_types" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_target" + transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) + pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( + cloud_deploy.ListCustomTargetTypesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21211,17 +24949,19 @@ def test_get_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
cloud_deploy.Target.to_json(cloud_deploy.Target()) + req.return_value._content = cloud_deploy.ListCustomTargetTypesResponse.to_json( + cloud_deploy.ListCustomTargetTypesResponse() + ) - request = cloud_deploy.GetTargetRequest() + request = cloud_deploy.ListCustomTargetTypesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Target() + post.return_value = cloud_deploy.ListCustomTargetTypesResponse() - client.get_target( + client.list_custom_target_types( request, metadata=[ ("key", "val"), @@ -21233,8 +24973,8 @@ def test_get_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetTargetRequest +def test_list_custom_target_types_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21242,7 +24982,7 @@ def test_get_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21254,10 +24994,10 @@ def test_get_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_target(request) + client.list_custom_target_types(request) -def test_get_target_rest_flattened(): +def test_list_custom_target_types_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21266,14 +25006,14 @@ def test_get_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -21281,24 +25021,25 @@ def test_get_target_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_target(**mock_args) + client.list_custom_target_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + % client.transport._host, args[1], ) -def test_get_target_rest_flattened_error(transport: str = "rest"): +def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21307,162 +25048,126 @@ def test_get_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_target( - cloud_deploy.GetTargetRequest(), - name="name_value", + client.list_custom_target_types( + cloud_deploy.ListCustomTargetTypesRequest(), + parent="parent_value", ) -def test_get_target_rest_error(): +def test_list_custom_target_types_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + next_page_token="abc", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[], + next_page_token="def", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_custom_target_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) + + pages = list(client.list_custom_target_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateTargetRequest, + cloud_deploy.GetCustomTargetTypeRequest, dict, ], ) -def test_create_target_rest(request_type): +def test_get_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["target"] = { - "name": "name_value", - "target_id": 
"target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": True, - } - ], - "deploy_parameters": {}, + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] - else: - del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType( + name="name_value", + custom_target_type_id="custom_target_type_id_value", + uid="uid_value", + description="description_value", + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.CustomTargetType) + assert response.name == "name_value" + assert response.custom_target_type_id == "custom_target_type_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" -def test_create_target_rest_use_cached_wrapped_rpc(): +def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21476,40 +25181,40 @@ def test_create_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_target in client._transport._wrapped_methods + assert ( + client._transport.get_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_custom_target_type + ] = mock_rpc request = {} - client.create_target(request) + client.get_custom_target_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_target(request) + client.get_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_target_rest_required_fields( - request_type=cloud_deploy.CreateTargetRequest, +def test_get_custom_target_type_rest_required_fields( + request_type=cloud_deploy.GetCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["target_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21517,38 +25222,24 @@ def test_create_target_rest_required_fields( ) # verify fields with default values are dropped - assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["targetId"] = "target_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "target_id", - "validate_only", - ) - ) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21557,7 +25248,7 @@ def test_create_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21569,58 +25260,39 @@ def test_create_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) - expected_params = [ - ( - "targetId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_target_rest_unset_required_fields(): +def test_get_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_target._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "targetId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "targetId", - "target", - ) - ) - ) + unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_target_rest_interceptors(null_interceptor): +def test_get_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21633,16 +25305,14 @@ def test_create_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_target" + transports.CloudDeployRestInterceptor, "post_get_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_target" + transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" ) as pre: pre.assert_not_called() 
post.assert_not_called() - pb_message = cloud_deploy.CreateTargetRequest.pb( - cloud_deploy.CreateTargetRequest() + pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( + cloud_deploy.GetCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -21654,19 +25324,19 @@ def test_create_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.CustomTargetType.to_json( + cloud_deploy.CustomTargetType() ) - request = cloud_deploy.CreateTargetRequest() + request = cloud_deploy.GetCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.CustomTargetType() - client.create_target( + client.get_custom_target_type( request, metadata=[ ("key", "val"), @@ -21678,8 +25348,8 @@ def test_create_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest +def test_get_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21687,7 +25357,9 @@ def test_create_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21699,10 +25371,10 @@ def test_create_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_target(request) + client.get_custom_target_type(request) -def test_create_target_rest_flattened(): +def test_get_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21711,39 +25383,42 @@ def test_create_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_target(**mock_args) + client.get_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_create_target_rest_flattened_error(transport: str = "rest"): +def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21752,15 +25427,13 @@ def test_create_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_target( - cloud_deploy.CreateTargetRequest(), - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + client.get_custom_target_type( + cloud_deploy.GetCustomTargetTypeRequest(), + name="name_value", ) -def test_create_target_rest_error(): +def test_get_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21769,67 +25442,60 @@ def test_create_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateTargetRequest, + cloud_deploy.CreateCustomTargetTypeRequest, dict, ], ) -def test_update_target_rest(request_type): +def test_create_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } - request_init["target"] = { - "name": "projects/sample1/locations/sample2/targets/sample3", - "target_id": "target_id_value", + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["custom_target_type"] = { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, - "require_approval": True, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": True, - } - ], - "deploy_parameters": {}, + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21857,7 +25523,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -21887,10 +25553,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] + for i in range(0, len(request_init["custom_target_type"][field])): + del request_init["custom_target_type"][field][i][subfield] else: - del request_init["target"][field][subfield] + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21905,13 +25571,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_target_rest_use_cached_wrapped_rpc(): +def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21925,17 +25591,22 @@ def test_update_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_target in client._transport._wrapped_methods + assert ( + client._transport.create_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_custom_target_type + ] = mock_rpc request = {} - client.update_target(request) + client.create_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -21944,19 +25615,21 @@ def test_update_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_target(request) + client.create_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_target_rest_required_fields( - request_type=cloud_deploy.UpdateTargetRequest, +def test_create_custom_target_type_rest_required_fields( + request_type=cloud_deploy.CreateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["custom_target_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21964,29 +25637,40 @@ def test_update_target_rest_required_fields( ) # verify fields with default values are dropped + assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "customTargetTypeId" in jsonified_request + assert ( + jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "custom_target_type_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "customTargetTypeId" in jsonified_request + assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22007,7 +25691,7 @@ def test_update_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -22020,39 +25704,45 @@ def test_update_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "customTargetTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_target_rest_unset_required_fields(): +def test_create_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_target._get_unset_required_fields({}) + unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "customTargetTypeId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "target", + "parent", + "customTargetTypeId", + "customTargetType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_target_rest_interceptors(null_interceptor): +def test_create_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22067,14 +25757,14 @@ def test_update_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_target" + transports.CloudDeployRestInterceptor, "post_create_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_target" + transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateTargetRequest.pb( - cloud_deploy.UpdateTargetRequest() + pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( + cloud_deploy.CreateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22090,7 +25780,7 @@ def test_update_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateTargetRequest() + request = cloud_deploy.CreateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22098,7 +25788,7 @@ def test_update_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_target( + client.create_custom_target_type( request, metadata=[ 
("key", "val"), @@ -22110,8 +25800,8 @@ def test_update_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest +def test_create_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22119,9 +25809,7 @@ def test_update_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22133,10 +25821,10 @@ def test_update_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_target(request) + client.create_custom_target_type(request) -def test_update_target_rest_flattened(): +def test_create_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22148,14 +25836,13 @@ def test_update_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) mock_args.update(sample_request) @@ -22166,20 +25853,20 @@ def test_update_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_target(**mock_args) + client.create_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{target.name=projects/*/locations/*/targets/*}" + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" % client.transport._host, args[1], ) -def test_update_target_rest_flattened_error(transport: str = "rest"): +def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22188,14 +25875,15 @@ def test_update_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_target( - cloud_deploy.UpdateTargetRequest(), - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_custom_target_type( + cloud_deploy.CreateCustomTargetTypeRequest(), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) -def test_update_target_rest_error(): +def test_create_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22204,18 +25892,125 @@ def test_update_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteTargetRequest, + cloud_deploy.UpdateCustomTargetTypeRequest, dict, ], ) -def test_delete_target_rest(request_type): +def test_update_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } + request_init["custom_target_type"] = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["custom_target_type"][field])): + del request_init["custom_target_type"][field][i][subfield] + else: + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22230,13 +26025,13 @@ def test_delete_target_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_target_rest_use_cached_wrapped_rpc(): +def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22250,17 +26045,22 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_target in client._transport._wrapped_methods + assert ( + client._transport.update_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_custom_target_type + ] = mock_rpc request = {} - client.delete_target(request) + client.update_custom_target_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -22269,20 +26069,19 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_target(request) + client.update_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_target_rest_required_fields( - request_type=cloud_deploy.DeleteTargetRequest, +def test_update_custom_target_type_rest_required_fields( + request_type=cloud_deploy.UpdateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22293,30 +26092,26 @@ def test_delete_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( "allow_missing", - "etag", "request_id", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22337,9 +26132,10 @@ def test_delete_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22349,34 +26145,39 @@ def test_delete_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_target_rest_unset_required_fields(): +def test_update_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_target._get_unset_required_fields({}) + unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "allowMissing", - "etag", "requestId", + "updateMask", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "updateMask", + "customTargetType", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_target_rest_interceptors(null_interceptor): +def test_update_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22391,14 +26192,14 @@ def test_delete_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_target" + transports.CloudDeployRestInterceptor, "post_update_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_target" + transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteTargetRequest.pb( - cloud_deploy.DeleteTargetRequest() + pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( + cloud_deploy.UpdateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22414,7 +26215,7 @@ def test_delete_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteTargetRequest() + request = cloud_deploy.UpdateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22422,7 +26223,7 @@ def test_delete_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_target( + client.update_custom_target_type( request, metadata=[ ("key", "val"), @@ -22434,8 +26235,8 @@ def test_delete_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_target_rest_bad_request( - transport: str = "rest", 
request_type=cloud_deploy.DeleteTargetRequest +def test_update_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22443,7 +26244,11 @@ def test_delete_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22455,10 +26260,10 @@ def test_delete_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_target(request) + client.update_custom_target_type(request) -def test_delete_target_rest_flattened(): +def test_update_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22470,11 +26275,16 @@ def test_delete_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -22485,19 +26295,20 @@ def test_delete_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_target(**mock_args) + client.update_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_delete_target_rest_flattened_error(transport: str = "rest"): +def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22506,13 +26317,14 @@ def test_delete_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_target( - cloud_deploy.DeleteTargetRequest(), - name="name_value", + client.update_custom_target_type( + cloud_deploy.UpdateCustomTargetTypeRequest(), + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_target_rest_error(): +def test_update_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22521,46 +26333,41 @@ def test_delete_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListCustomTargetTypesRequest, + cloud_deploy.DeleteCustomTargetTypeRequest, dict, ], ) -def test_list_custom_target_types_rest(request_type): +def test_delete_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCustomTargetTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): +def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22575,7 +26382,7 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_custom_target_types + client._transport.delete_custom_target_type in client._transport._wrapped_methods ) @@ -22585,29 +26392,33 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_custom_target_types + client._transport.delete_custom_target_type ] = mock_rpc request = {} - client.list_custom_target_types(request) + client.delete_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_custom_target_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_custom_target_types_rest_required_fields( - request_type=cloud_deploy.ListCustomTargetTypesRequest, +def test_delete_custom_target_type_rest_required_fields( + request_type=cloud_deploy.DeleteCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22618,30 +26429,30 @@ def test_list_custom_target_types_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "etag", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22650,7 +26461,7 @@ def test_list_custom_target_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22662,49 +26473,46 @@ def test_list_custom_target_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_custom_target_types_rest_unset_required_fields(): +def test_delete_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) - & set(("parent",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_custom_target_types_rest_interceptors(null_interceptor): +def test_delete_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22717,14 +26525,16 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_custom_target_types" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" + transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( - cloud_deploy.ListCustomTargetTypesRequest() + pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( + cloud_deploy.DeleteCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22736,19 +26546,19 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListCustomTargetTypesResponse.to_json( - cloud_deploy.ListCustomTargetTypesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListCustomTargetTypesRequest() + request = cloud_deploy.DeleteCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListCustomTargetTypesResponse() + post.return_value = operations_pb2.Operation() - 
client.list_custom_target_types( + client.delete_custom_target_type( request, metadata=[ ("key", "val"), @@ -22760,8 +26570,8 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_custom_target_types_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest +def test_delete_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22769,7 +26579,9 @@ def test_list_custom_target_types_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22781,10 +26593,10 @@ def test_list_custom_target_types_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_custom_target_types(request) + client.delete_custom_target_type(request) -def test_list_custom_target_types_rest_flattened(): +def test_delete_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22793,40 +26605,40 @@ def test_list_custom_target_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_custom_target_types(**mock_args) + client.delete_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" % client.transport._host, args[1], ) -def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): +def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22835,83 +26647,26 @@ def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_custom_target_types( - cloud_deploy.ListCustomTargetTypesRequest(), - parent="parent_value", + client.delete_custom_target_type( + cloud_deploy.DeleteCustomTargetTypeRequest(), + name="name_value", ) -def test_list_custom_target_types_rest_pager(transport: str = "rest"): +def test_delete_custom_target_type_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - next_page_token="abc", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[], - next_page_token="def", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_custom_target_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) - - pages = list(client.list_custom_target_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetCustomTargetTypeRequest, + cloud_deploy.ListReleasesRequest, dict, ], ) -def test_get_custom_target_type_rest(request_type): +def test_list_releases_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22919,42 +26674,36 @@ def test_get_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.CustomTargetType( - name="name_value", - custom_target_type_id="custom_target_type_id_value", - uid="uid_value", - description="description_value", - etag="etag_value", + return_value = cloud_deploy.ListReleasesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CustomTargetType) - assert response.name == "name_value" - assert response.custom_target_type_id == "custom_target_type_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListReleasesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_list_releases_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22968,40 +26717,35 @@ def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.list_releases in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc request = {} - client.get_custom_target_type(request) + client.list_releases(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_custom_target_type(request) + client.list_releases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_custom_target_type_rest_required_fields( - request_type=cloud_deploy.GetCustomTargetTypeRequest, +def test_list_releases_rest_required_fields( + request_type=cloud_deploy.ListReleasesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23012,21 +26756,30 @@ def test_get_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23035,7 +26788,7 @@ def test_get_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23056,30 +26809,40 @@ def test_get_custom_target_type_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_custom_target_type_rest_unset_required_fields(): +def test_list_releases_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_releases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_custom_target_type_rest_interceptors(null_interceptor): +def test_list_releases_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23092,14 +26855,14 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_custom_target_type" + transports.CloudDeployRestInterceptor, "post_list_releases" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_list_releases" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( - cloud_deploy.GetCustomTargetTypeRequest() + pb_message = cloud_deploy.ListReleasesRequest.pb( + cloud_deploy.ListReleasesRequest() ) transcode.return_value = { "method": "post", @@ -23111,19 +26874,19 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.CustomTargetType.to_json( - cloud_deploy.CustomTargetType() + req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( + cloud_deploy.ListReleasesResponse() ) - request = cloud_deploy.GetCustomTargetTypeRequest() + request = cloud_deploy.ListReleasesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.CustomTargetType() + post.return_value = cloud_deploy.ListReleasesResponse() - client.get_custom_target_type( + client.list_releases( request, metadata=[ ("key", "val"), @@ -23135,8 +26898,8 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest +def test_list_releases_rest_bad_request( + 
transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23145,7 +26908,7 @@ def test_get_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -23158,10 +26921,10 @@ def test_get_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_custom_target_type(request) + client.list_releases(request) -def test_get_custom_target_type_rest_flattened(): +def test_list_releases_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23170,16 +26933,16 @@ def test_get_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -23187,25 +26950,25 @@ def test_get_custom_target_type_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_custom_target_type(**mock_args) + client.list_releases(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_list_releases_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23214,157 +26977,134 @@ def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_custom_target_type( - cloud_deploy.GetCustomTargetTypeRequest(), - name="name_value", + client.list_releases( + cloud_deploy.ListReleasesRequest(), + parent="parent_value", ) -def test_get_custom_target_type_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.CreateCustomTargetTypeRequest, - dict, - ], -) -def test_create_custom_target_type_rest(request_type): +def test_list_releases_rest_pager(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["custom_target_type"] = { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + next_page_token="abc", + ), + cloud_deploy.ListReleasesResponse( + releases=[], + next_page_token="def", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_releases(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Release) for i in results) - subfields_not_in_runtime = [] + pages = list(client.list_releases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetReleaseRequest, + dict, + ], +) +def test_get_release_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] - else: - del request_init["custom_target_type"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release( + name="name_value", + uid="uid_value", + description="description_value", + abandoned=True, + skaffold_config_uri="skaffold_config_uri_value", + skaffold_config_path="skaffold_config_path_value", + render_state=cloud_deploy.Release.RenderState.SUCCEEDED, + etag="etag_value", + skaffold_version="skaffold_version_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Release) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.abandoned is True + assert response.skaffold_config_uri == "skaffold_config_uri_value" + assert response.skaffold_config_path == "skaffold_config_path_value" + assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED + assert response.etag == "etag_value" + assert response.skaffold_version == "skaffold_version_value" -def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_get_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23378,45 +27118,33 @@ def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.get_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_release] = mock_rpc request = {} - client.create_custom_target_type(request) + client.get_release(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_custom_target_type(request) + client.get_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_custom_target_type_rest_required_fields( - request_type=cloud_deploy.CreateCustomTargetTypeRequest, -): +def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["custom_target_type_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23424,40 +27152,24 @@ def test_create_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped - assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "customTargetTypeId" in jsonified_request - assert ( - jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "custom_target_type_id", - "request_id", - "validate_only", - ) - ) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "customTargetTypeId" in jsonified_request - assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23466,7 +27178,7 @@ def test_create_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23478,58 +27190,39 @@ def test_create_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) - expected_params = [ - ( - "customTargetTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_custom_target_type_rest_unset_required_fields(): +def test_get_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "customTargetTypeId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "customTargetTypeId", - "customTargetType", - ) - ) - ) + unset_fields = transport.get_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_custom_target_type_rest_interceptors(null_interceptor): +def test_get_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23542,17 +27235,13 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_custom_target_type" + transports.CloudDeployRestInterceptor, "post_get_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_get_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( - cloud_deploy.CreateCustomTargetTypeRequest() - ) + pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23563,19 +27252,17 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) - request = cloud_deploy.CreateCustomTargetTypeRequest() + request = cloud_deploy.GetReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = 
cloud_deploy.Release() - client.create_custom_target_type( + client.get_release( request, metadata=[ ("key", "val"), @@ -23587,8 +27274,8 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest +def test_get_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23596,7 +27283,9 @@ def test_create_custom_target_type_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23608,10 +27297,10 @@ def test_create_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_custom_target_type(request) + client.get_release(request) -def test_create_custom_target_type_rest_flattened(): +def test_get_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23620,40 +27309,42 @@ def test_create_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_custom_target_type(**mock_args) + client.get_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" % client.transport._host, args[1], ) -def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_get_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23662,15 +27353,13 @@ def test_create_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_custom_target_type( - cloud_deploy.CreateCustomTargetTypeRequest(), - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + client.get_release( + cloud_deploy.GetReleaseRequest(), + name="name_value", ) -def test_create_custom_target_type_rest_error(): +def test_get_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23679,11 +27368,11 @@ def test_create_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateCustomTargetTypeRequest, + cloud_deploy.CreateReleaseRequest, dict, ], ) -def test_update_custom_target_type_rest(request_type): +def test_create_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23691,52 +27380,231 @@ def test_update_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - request_init["custom_target_type"] = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", - "custom_target_type_id": "custom_target_type_id_value", + request_init["release"] = { + "name": "name_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, + "abandoned": True, "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, + "render_start_time": {}, + "render_end_time": {}, + "skaffold_config_uri": "skaffold_config_uri_value", + "skaffold_config_path": "skaffold_config_path_value", + "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], + "delivery_pipeline_snapshot": { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + 
"deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": "pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [ + {"values": {}, "match_target_labels": {}} + ], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": [ + "missing_targets_value1", + "missing_targets_value2", + ], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + }, + "target_snapshots": [ + { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": { + "target_ids": ["target_ids_value1", "target_ids_value2"] + }, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + ], + "custom_target_type_snapshots": [ + { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + ], + "render_state": 1, "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - 
"git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], + "skaffold_version": "skaffold_version_value", + "target_artifacts": {}, + "target_renders": {}, + "condition": { + "release_ready_condition": {"status": True}, + "skaffold_supported_condition": { + "status": True, + "skaffold_support_state": 1, + "maintenance_mode_time": {}, + "support_expiration_time": {}, + }, }, + "deploy_parameters": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] + test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -23764,7 +27632,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + for field, value in request_init["release"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -23794,10 +27662,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] + for i in range(0, len(request_init["release"][field])): + del request_init["release"][field][i][subfield] else: - del request_init["custom_target_type"][field][subfield] + del request_init["release"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -23812,13 +27680,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_create_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23832,22 +27700,17 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.create_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_release] = mock_rpc request = {} - client.update_custom_target_type(request) + client.create_release(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -23856,19 +27719,21 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_custom_target_type(request) + client.create_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_custom_target_type_rest_required_fields( - request_type=cloud_deploy.UpdateCustomTargetTypeRequest, +def test_create_release_rest_required_fields( + request_type=cloud_deploy.CreateReleaseRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23876,29 +27741,39 @@ def test_update_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped + assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == request_init["release_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "override_deploy_policy", + "release_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23919,7 +27794,7 @@ def test_update_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -23932,39 +27807,46 @@ def test_update_custom_target_type_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "releaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_custom_target_type_rest_unset_required_fields(): +def test_create_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) + unset_fields = transport.create_release._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "overrideDeployPolicy", + "releaseId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "customTargetType", + "parent", + "releaseId", + "release", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_custom_target_type_rest_interceptors(null_interceptor): +def test_create_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23979,14 +27861,14 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_custom_target_type" + transports.CloudDeployRestInterceptor, "post_create_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_create_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( - cloud_deploy.UpdateCustomTargetTypeRequest() + pb_message = cloud_deploy.CreateReleaseRequest.pb( + cloud_deploy.CreateReleaseRequest() ) transcode.return_value = { "method": "post", @@ -24002,7 +27884,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateCustomTargetTypeRequest() + request = cloud_deploy.CreateReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -24010,7 +27892,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - 
client.update_custom_target_type( + client.create_release( request, metadata=[ ("key", "val"), @@ -24022,8 +27904,8 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest +def test_create_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24032,9 +27914,7 @@ def test_update_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -24047,10 +27927,10 @@ def test_update_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_custom_target_type(request) + client.create_release(request) -def test_update_custom_target_type_rest_flattened(): +def test_create_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24063,15 +27943,14 @@ def test_update_custom_target_type_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) mock_args.update(sample_request) @@ -24082,20 +27961,20 @@ def test_update_custom_target_type_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_custom_target_type(**mock_args) + client.create_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_create_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24104,14 +27983,15 @@ def test_update_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_custom_target_type( - cloud_deploy.UpdateCustomTargetTypeRequest(), - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_release( + cloud_deploy.CreateReleaseRequest(), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) -def test_update_custom_target_type_rest_error(): +def test_create_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24120,11 +28000,11 @@ def test_update_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteCustomTargetTypeRequest, + cloud_deploy.AbandonReleaseRequest, dict, ], ) -def test_delete_custom_target_type_rest(request_type): +def test_abandon_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24132,29 +28012,31 @@ def test_delete_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.AbandonReleaseResponse) -def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_abandon_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24168,39 +28050,30 @@ def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.abandon_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc request = {} - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_custom_target_type_rest_required_fields( - request_type=cloud_deploy.DeleteCustomTargetTypeRequest, +def test_abandon_release_rest_required_fields( + request_type=cloud_deploy.AbandonReleaseRequest, ): transport_class = transports.CloudDeployRestTransport @@ -24216,7 +28089,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24225,16 +28098,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "etag", - "request_id", - "validate_only", - ) - ) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -24248,7 +28112,7 @@ def test_delete_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24260,46 +28124,40 @@ def test_delete_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_custom_target_type_rest_unset_required_fields(): +def test_abandon_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "etag", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.abandon_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_custom_target_type_rest_interceptors(null_interceptor): +def test_abandon_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24312,16 +28170,14 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "post_abandon_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_abandon_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( - cloud_deploy.DeleteCustomTargetTypeRequest() + pb_message = cloud_deploy.AbandonReleaseRequest.pb( + cloud_deploy.AbandonReleaseRequest() ) transcode.return_value = { "method": "post", @@ -24333,19 +28189,19 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( + cloud_deploy.AbandonReleaseResponse() ) - request = cloud_deploy.DeleteCustomTargetTypeRequest() + request = cloud_deploy.AbandonReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.AbandonReleaseResponse() - client.delete_custom_target_type( + client.abandon_release( 
request, metadata=[ ("key", "val"), @@ -24357,8 +28213,8 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest +def test_abandon_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24367,7 +28223,7 @@ def test_delete_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) @@ -24380,10 +28236,10 @@ def test_delete_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_custom_target_type(request) + client.abandon_release(request) -def test_delete_custom_target_type_rest_flattened(): +def test_abandon_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24392,11 +28248,11 @@ def test_delete_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } # get truthy value for each flattened field @@ -24408,24 +28264,26 @@ def test_delete_custom_target_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_custom_target_type(**mock_args) + client.abandon_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" % client.transport._host, args[1], ) -def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_abandon_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24434,13 +28292,13 @@ def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_custom_target_type( - cloud_deploy.DeleteCustomTargetTypeRequest(), + client.abandon_release( + cloud_deploy.AbandonReleaseRequest(), name="name_value", ) -def test_delete_custom_target_type_rest_error(): +def test_abandon_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24449,48 +28307,151 @@ def test_delete_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListReleasesRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_list_releases_rest(request_type): +def test_create_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deploy_policy"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "rollout_restriction": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_windows": { + "time_zone": "time_zone_value", + "one_time_windows": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_date": {}, + "end_time": {}, + } + ], + "weekly_windows": [ + {"days_of_week": [1], "start_time": {}, "end_time": {}} + ], + }, + } + } + ], + "etag": "etag_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) # Establish that the response is the type that we expect. 
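The pruning block above exists because the installed proto runtime can be older than the generator that produced the sample request, so any sub-fields the runtime does not define are stripped before the request is constructed. A condensed, one-level sketch of the same idea (the helper names below are illustrative, not the generated test's):

    def runtime_known_pairs(request_cls, field_name):
        """Return the (field, subfield) pairs the installed runtime defines for a composite field."""
        field = request_cls.meta.fields[field_name]  # proto-plus field descriptor
        if hasattr(field.message, "DESCRIPTOR"):     # raw protobuf message type
            children = field.message.DESCRIPTOR.fields
        else:                                        # proto-plus message type
            children = field.message.meta.fields.values()
        return {(field_name, child.name) for child in children}

    def prune_unknown_subfields(request_init, field_name, known_pairs):
        """Drop sample-request keys that the runtime's message type does not know about."""
        value = request_init[field_name]
        entries = value if isinstance(value, list) else [value]
        for entry in entries:
            for subfield in list(entry):
                if (field_name, subfield) not in known_pairs:
                    del entry[subfield]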
- assert isinstance(response, pagers.ListReleasesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_releases_rest_use_cached_wrapped_rpc(): +def test_create_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24504,35 +28465,44 @@ def test_list_releases_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_releases in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.list_releases(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_releases(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_releases_rest_required_fields( - request_type=cloud_deploy.ListReleasesRequest, +def test_create_deploy_policy_rest_required_fields( + request_type=cloud_deploy.CreateDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" + request_init["deploy_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24540,26 +28510,29 @@ def test_list_releases_rest_required_fields( ) # verify fields with default values are dropped + assert "deployPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == request_init["deploy_policy_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["deployPolicyId"] = "deploy_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "deploy_policy_id", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -24567,6 +28540,8 @@ def test_list_releases_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == "deploy_policy_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24575,7 +28550,7 @@ def test_list_releases_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24587,49 +28562,58 @@ def test_list_releases_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "deployPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_releases_rest_unset_required_fields(): +def test_create_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_releases._get_unset_required_fields({}) + unset_fields = transport.create_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "deployPolicyId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deployPolicyId", + "deployPolicy", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_releases_rest_interceptors(null_interceptor): +def test_create_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24642,14 +28626,16 @@ def test_list_releases_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_releases" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_releases" + transports.CloudDeployRestInterceptor, "pre_create_deploy_policy" ) as pre: pre.assert_not_called() 
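The unset-required-fields assertion above is plain set algebra: the left operand lists the optional query parameters that carry defaults, the right operand lists the method's required fields, and the expected result is their overlap. Worked out with the names from the create_deploy_policy assertion:

    optional_query_params = {"deployPolicyId", "requestId", "validateOnly"}
    required_fields = {"parent", "deployPolicyId", "deployPolicy"}

    # only deployPolicyId is both an optional query parameter and a required field,
    # so _get_unset_required_fields({}) is expected to report exactly that overlap
    assert optional_query_params & required_fields == {"deployPolicyId"}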
post.assert_not_called() - pb_message = cloud_deploy.ListReleasesRequest.pb( - cloud_deploy.ListReleasesRequest() + pb_message = cloud_deploy.CreateDeployPolicyRequest.pb( + cloud_deploy.CreateDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -24661,19 +28647,19 @@ def test_list_releases_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( - cloud_deploy.ListReleasesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListReleasesRequest() + request = cloud_deploy.CreateDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListReleasesResponse() + post.return_value = operations_pb2.Operation() - client.list_releases( + client.create_deploy_policy( request, metadata=[ ("key", "val"), @@ -24685,8 +28671,8 @@ def test_list_releases_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_releases_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest +def test_create_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24694,9 +28680,7 @@ def test_list_releases_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24708,10 +28692,10 @@ def test_list_releases_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_releases(request) + client.create_deploy_policy(request) -def test_list_releases_rest_flattened(): +def test_create_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24720,42 +28704,40 @@ def test_list_releases_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_releases(**mock_args) + client.create_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_list_releases_rest_flattened_error(transport: str = "rest"): +def test_create_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24764,134 +28746,172 @@ def test_list_releases_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_releases( - cloud_deploy.ListReleasesRequest(), + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) -def test_list_releases_rest_pager(transport: str = "rest"): +def test_create_deploy_policy_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - next_page_token="abc", - ), - cloud_deploy.ListReleasesResponse( - releases=[], - next_page_token="def", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - - pager = client.list_releases(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Release) for i in results) - - pages = list(client.list_releases(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetReleaseRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_get_release_rest(request_type): +def test_update_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + # send a request that will satisfy transcoding + request_init = { + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + } + request_init["deploy_policy"] = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "rollout_restriction": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_windows": { + "time_zone": "time_zone_value", + "one_time_windows": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_date": {}, + "end_time": {}, + } + ], + "weekly_windows": [ + {"days_of_week": [1], "start_time": {}, "end_time": {}} + ], + }, + } + } + ], + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Release( - name="name_value", - uid="uid_value", - description="description_value", - abandoned=True, - skaffold_config_uri="skaffold_config_uri_value", - skaffold_config_path="skaffold_config_path_value", - render_state=cloud_deploy.Release.RenderState.SUCCEEDED, - etag="etag_value", - skaffold_version="skaffold_version_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Release) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.abandoned is True - assert response.skaffold_config_uri == "skaffold_config_uri_value" - assert response.skaffold_config_path == "skaffold_config_path_value" - assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED - assert response.etag == "etag_value" - assert response.skaffold_version == "skaffold_version_value" + assert response.operation.name == "operations/spam" -def test_get_release_rest_use_cached_wrapped_rpc(): +def test_update_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24905,33 +28925,42 @@ def test_get_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_release in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.get_release(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
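The *_use_cached_wrapped_rpc tests above depend on the transport wrapping every RPC once (in _prep_wrapped_messages) and reusing that wrapper on later calls. A toy sketch of the caching shape, with illustrative names rather than the generated transport's internals:

    class ToyTransport:
        def __init__(self):
            # built once at construction time, analogous to _prep_wrapped_messages
            self._wrapped_methods = {
                self.update_deploy_policy: self._wrap(self.update_deploy_policy),
            }

        def _wrap(self, rpc):
            # the real transport uses gapic_v1.method.wrap_method to add retry/timeout
            def wrapped(*args, **kwargs):
                return rpc(*args, **kwargs)
            return wrapped

        def update_deploy_policy(self, request):
            return "operations/spam"

    transport = ToyTransport()
    assert transport.update_deploy_policy in transport._wrapped_methods
    first = transport._wrapped_methods[transport.update_deploy_policy]
    second = transport._wrapped_methods[transport.update_deploy_policy]
    assert first is second  # the same cached wrapper is reused, never rebuilt per call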
assert mock_rpc.call_count == 1 - client.get_release(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): +def test_update_deploy_policy_rest_required_fields( + request_type=cloud_deploy.UpdateDeployPolicyRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24942,21 +28971,26 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24965,7 +28999,7 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24977,39 +29011,52 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_release_rest_unset_required_fields(): +def test_update_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_release._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deployPolicy", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_release_rest_interceptors(null_interceptor): +def test_update_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25022,13 +29069,17 @@ def test_get_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_release" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_release" + transports.CloudDeployRestInterceptor, "pre_update_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) + pb_message = cloud_deploy.UpdateDeployPolicyRequest.pb( + cloud_deploy.UpdateDeployPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25039,17 +29090,19 @@ def test_get_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = cloud_deploy.GetReleaseRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Release() + post.return_value = operations_pb2.Operation() - client.get_release( + client.update_deploy_policy( request, metadata=[ ("key", "val"), @@ -25061,8 +29114,8 @@ def 
test_get_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest +def test_update_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25071,7 +29124,9 @@ def test_get_release_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } request = request_type(**request_init) @@ -25084,10 +29139,10 @@ def test_get_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_release(request) + client.update_deploy_policy(request) -def test_get_release_rest_flattened(): +def test_update_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25096,42 +29151,43 @@ def test_get_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_release(**mock_args) + client.update_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" + "%s/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_get_release_rest_flattened_error(transport: str = "rest"): +def test_update_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25140,13 +29196,14 @@ def test_get_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_release( - cloud_deploy.GetReleaseRequest(), - name="name_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_release_rest_error(): +def test_update_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25155,325 +29212,370 @@ def test_get_release_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateReleaseRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_create_release_rest(request_type): +def test_delete_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - request_init["release"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "abandoned": True, - "create_time": {"seconds": 751, "nanos": 543}, - "render_start_time": {}, - "render_end_time": {}, - "skaffold_config_uri": "skaffold_config_uri_value", - "skaffold_config_path": "skaffold_config_path_value", - "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], - "delivery_pipeline_snapshot": { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [ - {"values": {}, "match_target_labels": {}} - ], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": [ - "missing_targets_value1", - 
"missing_targets_value2", - ], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - }, - "target_snapshots": [ - { - "name": "name_value", - "target_id": "target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {}, - "update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": { - "target_ids": ["target_ids_value1", "target_ids_value2"] - }, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {}, - "verbose": True, - } - ], - "deploy_parameters": {}, - } - ], - "custom_target_type_snapshots": [ - { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deploy_policy(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_deploy_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc + + request = {} + client.delete_deploy_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_deploy_policy_rest_required_fields( + request_type=cloud_deploy.DeleteDeployPolicyRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
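The comment above explains why the required-fields tests patch path_template.transcode instead of exercising the real transcoder. A minimal sketch of such a stub, with the uri and method values chosen arbitrarily for illustration:

    from unittest import mock

    from google.api_core import path_template

    fake_transcoded = {
        "uri": "v1/sample_method",  # no path parameters, so every field ends up in query_params
        "method": "delete",
        "query_params": {},         # the generated tests pass the request protobuf here
    }
    with mock.patch.object(path_template, "transcode", return_value=fake_transcoded):
        # anything the transport would transcode now gets the canned result back
        assert path_template.transcode(object()) == fake_transcoded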
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } - ], - "render_state": 1, - "etag": "etag_value", - "skaffold_version": "skaffold_version_value", - "target_artifacts": {}, - "target_renders": {}, - "condition": { - "release_ready_condition": {"status": True}, - "skaffold_supported_condition": { - "status": True, - "skaffold_support_state": 1, - "maintenance_mode_time": {}, - "support_expiration_time": {}, - }, - }, - "deploy_parameters": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + transcode.return_value = transcode_result - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deploy_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deploy_policy_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deploy_policy_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_deploy_policy" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_delete_deploy_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.DeleteDeployPolicyRequest.pb( + cloud_deploy.DeleteDeployPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + request = cloud_deploy.DeleteDeployPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - subfields_not_in_runtime = [] + client.delete_deploy_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["release"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + pre.assert_called_once() + post.assert_called_once() - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["release"][field])): - del request_init["release"][field][i][subfield] - else: - del request_init["release"][field][subfield] +def test_delete_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeployPolicyRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deploy_policy(request) + + +def test_delete_deploy_policy_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deploy_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deploy_policy_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), + name="name_value", + ) + + +def test_delete_deploy_policy_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeployPoliciesRequest, + dict, + ], +) +def test_list_deploy_policies_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.ListDeployPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListDeployPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_release_rest_use_cached_wrapped_rpc(): +def test_list_deploy_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25487,40 +29589,39 @@ def test_create_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_release in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc request = {} - client.create_release(request) + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. 
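The pager assertion above only checks the wrapper type and the first page's fields; in real use the pager is iterated and fetches further pages lazily. A short usage sketch, assuming the public deploy_v1 module from the google-cloud-deploy package and a client constructed elsewhere:

    from google.cloud import deploy_v1

    def print_deploy_policies(client: deploy_v1.CloudDeployClient, parent: str) -> None:
        # iterating the pager yields DeployPolicy messages and requests new pages as needed
        for policy in client.list_deploy_policies(parent=parent):
            print(policy.name)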
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_release(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_release_rest_required_fields( - request_type=cloud_deploy.CreateReleaseRequest, +def test_list_deploy_policies_rest_required_fields( + request_type=cloud_deploy.ListDeployPoliciesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" - request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25528,29 +29629,26 @@ def test_create_release_rest_required_fields( ) # verify fields with default values are dropped - assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + ).list_deploy_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == request_init["release_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + ).list_deploy_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "release_id", - "request_id", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -25558,8 +29656,6 @@ def test_create_release_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25568,7 +29664,7 @@ def test_create_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25580,58 +29676,49 @@ def test_create_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) - expected_params = [ - ( - "releaseId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_release_rest_unset_required_fields(): +def test_list_deploy_policies_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_release._get_unset_required_fields({}) + unset_fields = transport.list_deploy_policies._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "releaseId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "releaseId", - "release", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_release_rest_interceptors(null_interceptor): +def test_list_deploy_policies_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25644,16 +29731,14 @@ def test_create_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_release" + transports.CloudDeployRestInterceptor, "post_list_deploy_policies" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_release" + transports.CloudDeployRestInterceptor, "pre_list_deploy_policies" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateReleaseRequest.pb( - cloud_deploy.CreateReleaseRequest() + pb_message = cloud_deploy.ListDeployPoliciesRequest.pb( + cloud_deploy.ListDeployPoliciesRequest() ) transcode.return_value = { "method": "post", @@ -25665,19 +29750,19 @@ def test_create_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.ListDeployPoliciesResponse.to_json( + cloud_deploy.ListDeployPoliciesResponse() ) - request = cloud_deploy.CreateReleaseRequest() + request = cloud_deploy.ListDeployPoliciesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.ListDeployPoliciesResponse() - 
client.create_release( + client.list_deploy_policies( request, metadata=[ ("key", "val"), @@ -25689,8 +29774,8 @@ def test_create_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateReleaseRequest +def test_list_deploy_policies_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25698,9 +29783,7 @@ def test_create_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25712,10 +29795,10 @@ def test_create_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_release(request) + client.list_deploy_policies(request) -def test_create_release_rest_flattened(): +def test_list_deploy_policies_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25724,42 +29807,40 @@ def test_create_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_release(**mock_args) + client.list_deploy_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_create_release_rest_flattened_error(transport: str = "rest"): +def test_list_deploy_policies_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25768,60 +29849,124 @@ def test_create_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_release( - cloud_deploy.CreateReleaseRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) -def test_create_release_rest_error(): +def test_list_deploy_policies_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], + next_page_token="def", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeployPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deploy_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) + + pages = list(client.list_deploy_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AbandonReleaseRequest, + cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_abandon_release_rest(request_type): +def test_get_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy( + name="name_value", + uid="uid_value", + description="description_value", + suspended=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AbandonReleaseResponse) + assert isinstance(response, cloud_deploy.DeployPolicy) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.suspended is True + assert response.etag == "etag_value" -def test_abandon_release_rest_use_cached_wrapped_rpc(): +def test_get_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25835,30 +29980,32 @@ def test_abandon_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.abandon_release in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_abandon_release_rest_required_fields( - request_type=cloud_deploy.AbandonReleaseRequest, +def test_get_deploy_policy_rest_required_fields( + request_type=cloud_deploy.GetDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport @@ -25874,7 +30021,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -25883,7 +30030,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -25897,7 +30044,7 @@ def test_abandon_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25909,40 +30056,39 @@ def test_abandon_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_abandon_release_rest_unset_required_fields(): +def test_get_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.abandon_release._get_unset_required_fields({}) + unset_fields = transport.get_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_abandon_release_rest_interceptors(null_interceptor): +def test_get_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25955,14 +30101,14 @@ def test_abandon_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.CloudDeployRestInterceptor, "post_abandon_release" + transports.CloudDeployRestInterceptor, "post_get_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_abandon_release" + transports.CloudDeployRestInterceptor, "pre_get_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.AbandonReleaseRequest.pb( - cloud_deploy.AbandonReleaseRequest() + pb_message = cloud_deploy.GetDeployPolicyRequest.pb( + cloud_deploy.GetDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -25974,19 +30120,19 @@ def test_abandon_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( - cloud_deploy.AbandonReleaseResponse() + req.return_value._content = cloud_deploy.DeployPolicy.to_json( + cloud_deploy.DeployPolicy() ) - request = cloud_deploy.AbandonReleaseRequest() + request = cloud_deploy.GetDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.AbandonReleaseResponse() + post.return_value = cloud_deploy.DeployPolicy() - client.abandon_release( + client.get_deploy_policy( request, metadata=[ ("key", "val"), @@ -25998,8 +30144,8 @@ def test_abandon_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_abandon_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest +def test_get_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26007,9 +30153,7 @@ def test_abandon_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26021,10 +30165,10 @@ def test_abandon_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.abandon_release(request) + client.get_deploy_policy(request) -def test_abandon_release_rest_flattened(): +def test_get_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26033,11 +30177,11 @@ def test_abandon_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" } # get truthy value for each flattened field @@ -26050,25 +30194,25 @@ def test_abandon_release_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.abandon_release(**mock_args) + client.get_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_abandon_release_rest_flattened_error(transport: str = "rest"): +def test_get_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26077,13 +30221,13 @@ def test_abandon_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.abandon_release( - cloud_deploy.AbandonReleaseRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) -def test_abandon_release_rest_error(): +def test_get_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28009,6 +32153,7 @@ def test_create_rollout_rest_required_fields( # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( + "override_deploy_policy", "request_id", "rollout_id", "starting_phase_id", @@ -28077,6 +32222,7 @@ def test_create_rollout_rest_unset_required_fields(): assert set(unset_fields) == ( set( ( + "overrideDeployPolicy", "requestId", "rolloutId", "startingPhaseId", @@ -33332,6 +37478,11 @@ def test_cloud_deploy_base_transport(): "get_release", "create_release", "abandon_release", + "create_deploy_policy", + "update_deploy_policy", + "delete_deploy_policy", + "list_deploy_policies", + "get_deploy_policy", "approve_rollout", "advance_rollout", "cancel_rollout", @@ -33698,6 +37849,21 @@ def test_cloud_deploy_client_transport_session_collision(transport_name): session1 = client1.transport.abandon_release._session session2 = client2.transport.abandon_release._session assert session1 != session2 + session1 = client1.transport.create_deploy_policy._session + session2 = client2.transport.create_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.update_deploy_policy._session + session2 = client2.transport.update_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.delete_deploy_policy._session + session2 = client2.transport.delete_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.list_deploy_policies._session + session2 = client2.transport.list_deploy_policies._session + assert session1 != session2 + session1 = client1.transport.get_deploy_policy._session + session2 = client2.transport.get_deploy_policy._session + assert session1 != session2 session1 = client1.transport.approve_rollout._session session2 = client2.transport.approve_rollout._session assert session1 != session2 @@ -34105,10 +38271,38 @@ def test_parse_delivery_pipeline_path(): assert expected == actual -def test_job_path(): +def test_deploy_policy_path(): project = "winkle" location = "nautilus" - job = "scallop" + deploy_policy = "scallop" + expected = ( + "projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format( + project=project, + location=location, + deploy_policy=deploy_policy, + ) + ) + actual = CloudDeployClient.deploy_policy_path(project, location, deploy_policy) + assert expected == actual + + +def test_parse_deploy_policy_path(): + expected = { + "project": "abalone", + "location": "squid", + "deploy_policy": "clam", + } + path = CloudDeployClient.deploy_policy_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudDeployClient.parse_deploy_policy_path(path) + assert expected == actual + + +def test_job_path(): + project = "whelk" + location = "octopus" + job = "oyster" expected = "projects/{project}/locations/{location}/jobs/{job}".format( project=project, location=location, @@ -34120,9 +38314,9 @@ def test_job_path(): def test_parse_job_path(): expected = { - "project": "abalone", - "location": "squid", - "job": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "job": "mussel", } path = CloudDeployClient.job_path(**expected) @@ -34132,12 +38326,12 @@ def test_parse_job_path(): def test_job_run_path(): - project = "whelk" - location = "octopus" - delivery_pipeline = "oyster" - release = "nudibranch" - rollout = "cuttlefish" - job_run = "mussel" + project = "winkle" + location = "nautilus" + delivery_pipeline = "scallop" + release = "abalone" + rollout = "squid" + job_run = "clam" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{job_run}".format( project=project, location=location, @@ -34154,12 +38348,12 @@ def test_job_run_path(): def test_parse_job_run_path(): expected = { - "project": "winkle", - "location": "nautilus", - "delivery_pipeline": "scallop", - "release": "abalone", - "rollout": "squid", - "job_run": "clam", + "project": "whelk", + "location": "octopus", + "delivery_pipeline": "oyster", + "release": "nudibranch", + "rollout": "cuttlefish", + "job_run": "mussel", } path = CloudDeployClient.job_run_path(**expected) @@ -34169,9 +38363,9 @@ def test_parse_job_run_path(): def test_membership_path(): - project = "whelk" - location = "octopus" - membership = "oyster" + project = "winkle" + location = "nautilus" + membership = "scallop" expected = ( "projects/{project}/locations/{location}/memberships/{membership}".format( project=project, @@ -34185,9 +38379,9 @@ def test_membership_path(): def test_parse_membership_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "membership": "mussel", + "project": "abalone", + "location": "squid", + "membership": "clam", } path = CloudDeployClient.membership_path(**expected) @@ -34197,10 +38391,10 @@ def test_parse_membership_path(): def test_release_path(): - project = "winkle" - location = "nautilus" - delivery_pipeline = "scallop" - release = "abalone" + project = "whelk" + location = "octopus" + delivery_pipeline = "oyster" + release = "nudibranch" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( project=project, location=location, @@ -34215,10 +38409,10 @@ def test_release_path(): def test_parse_release_path(): expected = { - "project": "squid", - "location": "clam", - "delivery_pipeline": "whelk", - "release": "octopus", + "project": "cuttlefish", + "location": "mussel", + "delivery_pipeline": "winkle", + "release": "nautilus", } path = CloudDeployClient.release_path(**expected) @@ -34228,10 +38422,10 @@ def test_parse_release_path(): def test_repository_path(): - project = "oyster" - location = "nudibranch" - connection = "cuttlefish" - repository = "mussel" + project = "scallop" + location = "abalone" + connection = "squid" + repository = "clam" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -34246,10 +38440,10 @@ def test_repository_path(): def test_parse_repository_path(): expected = { - "project": "winkle", - "location": "nautilus", 
- "connection": "scallop", - "repository": "abalone", + "project": "whelk", + "location": "octopus", + "connection": "oyster", + "repository": "nudibranch", } path = CloudDeployClient.repository_path(**expected) @@ -34259,11 +38453,11 @@ def test_parse_repository_path(): def test_rollout_path(): - project = "squid" - location = "clam" - delivery_pipeline = "whelk" - release = "octopus" - rollout = "oyster" + project = "cuttlefish" + location = "mussel" + delivery_pipeline = "winkle" + release = "nautilus" + rollout = "scallop" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( project=project, location=location, @@ -34279,11 +38473,11 @@ def test_rollout_path(): def test_parse_rollout_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "delivery_pipeline": "mussel", - "release": "winkle", - "rollout": "nautilus", + "project": "abalone", + "location": "squid", + "delivery_pipeline": "clam", + "release": "whelk", + "rollout": "octopus", } path = CloudDeployClient.rollout_path(**expected) @@ -34293,9 +38487,9 @@ def test_parse_rollout_path(): def test_service_path(): - project = "scallop" - location = "abalone" - service = "squid" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -34307,9 +38501,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "clam", - "location": "whelk", - "service": "octopus", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = CloudDeployClient.service_path(**expected) @@ -34319,9 +38513,9 @@ def test_parse_service_path(): def test_target_path(): - project = "oyster" - location = "nudibranch" - target = "cuttlefish" + project = "scallop" + location = "abalone" + target = "squid" expected = "projects/{project}/locations/{location}/targets/{target}".format( project=project, location=location, @@ -34333,9 +38527,9 @@ def test_target_path(): def test_parse_target_path(): expected = { - "project": "mussel", - "location": "winkle", - "target": "nautilus", + "project": "clam", + "location": "whelk", + "target": "octopus", } path = CloudDeployClient.target_path(**expected) @@ -34345,9 +38539,9 @@ def test_parse_target_path(): def test_worker_pool_path(): - project = "scallop" - location = "abalone" - worker_pool = "squid" + project = "oyster" + location = "nudibranch" + worker_pool = "cuttlefish" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -34361,9 +38555,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "clam", - "location": "whelk", - "worker_pool": "octopus", + "project": "mussel", + "location": "winkle", + "worker_pool": "nautilus", } path = CloudDeployClient.worker_pool_path(**expected) @@ -34373,7 +38567,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -34383,7 +38577,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "abalone", } path = CloudDeployClient.common_billing_account_path(**expected) @@ -34393,7 +38587,7 @@ def test_parse_common_billing_account_path(): 
def test_common_folder_path(): - folder = "cuttlefish" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -34403,7 +38597,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "clam", } path = CloudDeployClient.common_folder_path(**expected) @@ -34413,7 +38607,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -34423,7 +38617,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "octopus", } path = CloudDeployClient.common_organization_path(**expected) @@ -34433,7 +38627,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -34443,7 +38637,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "nudibranch", } path = CloudDeployClient.common_project_path(**expected) @@ -34453,8 +38647,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -34465,8 +38659,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "winkle", + "location": "nautilus", } path = CloudDeployClient.common_location_path(**expected) From 272130a696a54b5e895c13cf8e1aae3ee4dc0889 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:07:04 -0700 Subject: [PATCH 099/108] chore: Update the root changelog (#13091) Update the root changelog Co-authored-by: ohmayr --- CHANGELOG.md | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1004d55731cc..3c4f1ec4af65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,11 +4,11 @@ Changelogs ----- - [google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) - [google-ads-marketingplatform-admin==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-ai-generativelanguage==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-analytics-data==0.18.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- 
[google-apps-chat==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-chat==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) - [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) - [google-apps-meet==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) - [google-apps-script-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) @@ -26,12 +26,12 @@ Changelogs - [google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) - [google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) - [google-cloud-artifact-registry==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-asset==3.26.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) +- [google-cloud-asset==3.26.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) - [google-cloud-assured-workloads==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.27](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.28](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -49,10 +49,10 @@ Changelogs - [google-cloud-billing-budgets==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) - [google-cloud-billing==1.13.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) - [google-cloud-binary-authorization==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) -- [google-cloud-build==3.24.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) +- 
[google-cloud-build==3.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) - [google-cloud-certificate-manager==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) - [google-cloud-channel==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-cloudcontrolspartner==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) +- [google-cloud-cloudcontrolspartner==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) - [google-cloud-cloudquotas==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/CHANGELOG.md) - [google-cloud-commerce-consumer-procurement==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) - [google-cloud-common==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) @@ -72,14 +72,14 @@ Changelogs - [google-cloud-datalabeling==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) - [google-cloud-dataplex==2.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) - [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-dataproc==5.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) - [google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) - [google-cloud-deploy==2.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) - [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) - [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-dialogflow==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) - [google-cloud-discoveryengine==0.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-dlp==3.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - [google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) - [google-cloud-documentai==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - 
[google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) @@ -91,18 +91,18 @@ Changelogs - [google-cloud-eventarc==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) - [google-cloud-filestore==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) - [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gdchardwaremanagement==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) +- [google-cloud-gdchardwaremanagement==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) - [google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) - [google-cloud-gke-connect-gateway==0.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) - [google-cloud-gke-hub==1.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- [google-cloud-gke-multicloud==0.6.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- [google-cloud-gke-multicloud==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) - [google-cloud-gsuiteaddons==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) - [google-cloud-iam-logging==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) - [google-cloud-iam==2.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) - [google-cloud-iap==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) - [google-cloud-ids==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) - [google-cloud-kms-inventory==0.2.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==2.24.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-kms==3.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) - [google-cloud-language==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) - [google-cloud-life-sciences==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) - [google-cloud-managed-identities==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) @@ -113,14 +113,15 @@ Changelogs - [google-cloud-monitoring-dashboards==2.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) - 
[google-cloud-monitoring-metrics-scopes==1.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) - [google-cloud-monitoring==2.22.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- [google-cloud-netapp==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-netapp==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) - [google-cloud-network-connectivity==2.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) - [google-cloud-network-management==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) - [google-cloud-network-security==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) - [google-cloud-network-services==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) - [google-cloud-notebooks==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) - [google-cloud-optimization==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) -- [google-cloud-orchestration-airflow==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) +- [google-cloud-oracledatabase==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) +- [google-cloud-orchestration-airflow==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) - [google-cloud-os-config==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) - [google-cloud-os-login==2.14.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) - [google-cloud-parallelstore==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) @@ -181,11 +182,12 @@ Changelogs - [google-cloud-workstations==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) - [google-maps-addressvalidation==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) +- [google-maps-areainsights==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) - [google-maps-fleetengine-delivery==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) - [google-maps-fleetengine==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) - [google-maps-mapsplatformdatasets==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- 
[google-maps-places==0.1.17](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- [google-maps-routeoptimization==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) +- [google-maps-places==0.1.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- [google-maps-routeoptimization==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) - [google-maps-routing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) - [google-maps-solar==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) - [google-shopping-css==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) From 14092407b9faffccb6d8db45751a08c4e589bd51 Mon Sep 17 00:00:00 2001 From: yoshi-code-bot <70984784+yoshi-code-bot@users.noreply.github.com> Date: Fri, 4 Oct 2024 11:12:25 -0700 Subject: [PATCH 100/108] chore: Update release-please config files (#13127) Update release-please config files Co-authored-by: ohmayr --- release-please-config.json | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/release-please-config.json b/release-please-config.json index 7866aa34f750..71fbe6873b37 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -1361,11 +1361,6 @@ "path": "samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json", "type": "json" }, - { - "jsonpath": "$.clientLibrary.version", - "path": "samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json", - "type": "json" - }, { "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json", @@ -2106,7 +2101,13 @@ "component": "google-cloud-parallelstore", "extra-files": [ "google/cloud/parallelstore/gapic_version.py", + "google/cloud/parallelstore_v1/gapic_version.py", "google/cloud/parallelstore_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1.json", + "type": "json" + }, { "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.cloud.parallelstore.v1beta.json", From 852d797f21d4809c32d98b384c60bf9852b14216 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:00:54 -0400 Subject: [PATCH 101/108] feat: [google-cloud-commerce-consumer-procurement] add Order modification RPCs and License Management Service (#13128) BEGIN_COMMIT_OVERRIDE feat: add Order modification RPCs and License Management Service docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_APPROVED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_COMPLETED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_REJECTED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ABANDONED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ACTIVATING` in enum 
`LineItemChangeState` is changed docs: A comment for field `request_id` in message `.google.cloud.commerce.consumer.procurement.v1.PlaceOrderRequest` is changed docs: A comment for field `filter` in message `.google.cloud.commerce.consumer.procurement.v1.ListOrdersRequest` is changed END_COMMIT_OVERRIDE - [ ] Regenerate this pull request now. docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_APPROVED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_COMPLETED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_REJECTED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ABANDONED` in enum `LineItemChangeState` is changed docs: A comment for enum value `LINE_ITEM_CHANGE_STATE_ACTIVATING` in enum `LineItemChangeState` is changed docs: A comment for field `request_id` in message `.google.cloud.commerce.consumer.procurement.v1.PlaceOrderRequest` is changed docs: A comment for field `filter` in message `.google.cloud.commerce.consumer.procurement.v1.ListOrdersRequest` is changed PiperOrigin-RevId: 682457622 Source-Link: https://github.com/googleapis/googleapis/commit/1f8352cf46df74d7db6fd544181655c590689b8c Source-Link: https://github.com/googleapis/googleapis-gen/commit/172720068f2259b365a56a22a0c7c2a6d446c732 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbW1lcmNlLWNvbnN1bWVyLXByb2N1cmVtZW50Ly5Pd2xCb3QueWFtbCIsImgiOiIxNzI3MjAwNjhmMjI1OWIzNjVhNTZhMjJhMGM3YzJhNmQ0NDZjNzMyIn0= --------- Co-authored-by: Owl Bot --- .../license_management_service.rst | 10 + .../services_.rst | 1 + .../commerce_consumer_procurement/__init__.py | 42 + .../__init__.py | 40 + .../gapic_metadata.json | 124 + .../async_client.py | 224 + .../consumer_procurement_service/client.py | 220 + .../transports/base.py | 28 + .../transports/grpc.py | 56 + .../transports/grpc_asyncio.py | 70 + .../transports/rest.py | 266 + .../license_management_service/__init__.py | 22 + .../async_client.py | 925 +++ .../license_management_service/client.py | 1339 ++++ .../license_management_service/pagers.py | 207 + .../transports/__init__.py | 41 + .../transports/base.py | 246 + .../transports/grpc.py | 411 ++ .../transports/grpc_asyncio.py | 441 ++ .../transports/rest.py | 958 +++ .../types/__init__.py | 34 + .../types/license_management_service.py | 331 + .../types/order.py | 18 +- .../types/procurement_service.py | 186 +- ..._procurement_service_cancel_order_async.py | 56 + ...r_procurement_service_cancel_order_sync.py | 56 + ..._procurement_service_modify_order_async.py | 56 + ...r_procurement_service_modify_order_sync.py | 56 + ...license_management_service_assign_async.py | 53 + ..._license_management_service_assign_sync.py | 53 + ..._service_enumerate_licensed_users_async.py | 53 + ...t_service_enumerate_licensed_users_sync.py | 53 + ...nagement_service_get_license_pool_async.py | 52 + ...anagement_service_get_license_pool_sync.py | 52 + ...cense_management_service_unassign_async.py | 53 + ...icense_management_service_unassign_sync.py | 53 + ...ement_service_update_license_pool_async.py | 51 + ...gement_service_update_license_pool_sync.py | 51 + ...loud.commerce.consumer.procurement.v1.json | 1177 +++- ...mmerce_consumer_procurement_v1_keywords.py | 7 + .../test_consumer_procurement_service.py | 1776 ++++- .../test_license_management_service.py | 6057 +++++++++++++++++ 42 files 
changed, 15626 insertions(+), 379 deletions(-) create mode 100644 packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py create mode 100644 
packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py create mode 100644 packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst new file mode 100644 index 000000000000..d08a71e7aec0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst @@ -0,0 +1,10 @@ +LicenseManagementService +------------------------------------------ + +.. automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst index d5e8b5f12ecb..8d66166cebbe 100644 --- a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst @@ -4,3 +4,4 @@ Services for Google Cloud Commerce Consumer Procurement v1 API :maxdepth: 2 consumer_procurement_service + license_management_service diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py index f271433c727b..bba3576813f8 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py @@ -24,6 +24,25 @@ from google.cloud.commerce_consumer_procurement_v1.services.consumer_procurement_service.client import ( ConsumerProcurementServiceClient, ) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.async_client import ( + LicenseManagementServiceAsyncClient, +) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.client import ( + LicenseManagementServiceClient, +) +from google.cloud.commerce_consumer_procurement_v1.types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from google.cloud.commerce_consumer_procurement_v1.types.order import ( LineItem, LineItemChange, @@ -36,9 +55,14 @@ Subscription, ) from google.cloud.commerce_consumer_procurement_v1.types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) @@ -46,6 +70,19 @@ __all__ = ( "ConsumerProcurementServiceClient", "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", + "AssignmentProtocol", + "AssignRequest", + "AssignResponse", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", + "LicensedUser", + "LicensePool", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", "LineItem", "LineItemChange", "LineItemInfo", @@ -55,9 +92,14 @@ "LineItemChangeState", "LineItemChangeStateReasonType", "LineItemChangeType", + "CancelOrderMetadata", + "CancelOrderRequest", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "PlaceOrderMetadata", "PlaceOrderRequest", + "AutoRenewalBehavior", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py 
index d1a4fa34b7d8..2a6c2e07c4ab 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py @@ -24,6 +24,23 @@ ConsumerProcurementServiceAsyncClient, ConsumerProcurementServiceClient, ) +from .services.license_management_service import ( + LicenseManagementServiceAsyncClient, + LicenseManagementServiceClient, +) +from .types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from .types.order import ( LineItem, LineItemChange, @@ -36,17 +53,35 @@ Subscription, ) from .types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) __all__ = ( "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceAsyncClient", + "AssignRequest", + "AssignResponse", + "AssignmentProtocol", + "AutoRenewalBehavior", + "CancelOrderMetadata", + "CancelOrderRequest", "ConsumerProcurementServiceClient", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", "GetOrderRequest", + "LicenseManagementServiceClient", + "LicensePool", + "LicensedUser", "LineItem", "LineItemChange", "LineItemChangeState", @@ -55,9 +90,14 @@ "LineItemInfo", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "Order", "Parameter", "PlaceOrderMetadata", "PlaceOrderRequest", "Subscription", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json index 638c161ad386..e11a84a7c315 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -20,6 +25,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -30,6 +40,11 @@ "grpc-async": { "libraryClient": "ConsumerProcurementServiceAsyncClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -40,6 +55,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -50,6 +70,11 @@ "rest": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -60,6 +85,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -68,6 +98,100 @@ } } } + }, + "LicenseManagementService": { + "clients": { + 
"grpc": { + "libraryClient": "LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LicenseManagementServiceAsyncClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "rest": { + "libraryClient": "LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + } + } } } } diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py index ba83a537babe..4b4132787111 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py @@ -658,6 +658,230 @@ async def sample_list_orders(): # Done; return the response. return response + async def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]]): + The request object. 
Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. + return response + + async def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. 
+ return response + async def get_operation( self, request: Optional[operations_pb2.GetOperationRequest] = None, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py index be7ec242cec1..525ad9877370 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py @@ -1082,6 +1082,226 @@ def sample_list_orders(): # Done; return the response. return response + def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. 
+ + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. + return response + + def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. 
+ + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "ConsumerProcurementServiceClient": return self diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py index 405ae9789b33..b7df5231e9d5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py @@ -167,6 +167,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -213,6 +223,24 @@ def list_orders( ]: raise NotImplementedError() + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py index 25e976eeed36..307d720364fa 100644 --- 
a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py @@ -353,6 +353,62 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the modify order method over gRPC. + + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + Returns: + Callable[[~.ModifyOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + Returns: + Callable[[~.CancelOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py index fb34a2b76187..0cdba90bcd6f 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py @@ -366,6 +366,66 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the modify order method over gRPC. 
+ + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + Returns: + Callable[[~.ModifyOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + Returns: + Callable[[~.CancelOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -402,6 +462,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method_async.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method_async.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py index 353e9db69b53..16459934f854 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py @@ -74,6 +74,14 @@ class ConsumerProcurementServiceRestInterceptor: .. 
code-block:: python class MyCustomConsumerProcurementServiceInterceptor(ConsumerProcurementServiceRestInterceptor): + def pre_cancel_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +98,14 @@ def post_list_orders(self, response): logging.log(f"Received response: {response}") return response + def pre_modify_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_modify_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_place_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +120,29 @@ def post_place_order(self, response): """ + def pre_cancel_order( + self, + request: procurement_service.CancelOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.CancelOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_cancel_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. + """ + return response + def pre_get_order( self, request: procurement_service.GetOrderRequest, @@ -148,6 +187,29 @@ def post_list_orders( """ return response + def pre_modify_order( + self, + request: procurement_service.ModifyOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.ModifyOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for modify_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_modify_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for modify_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. + """ + return response + def pre_place_order( self, request: procurement_service.PlaceOrderRequest, @@ -333,6 +395,100 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CancelOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("CancelOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.CancelOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel order method over HTTP. 
+ + Args: + request (~.procurement_service.CancelOrderRequest): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_order(request, metadata) + pb_request = procurement_service.CancelOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_order(resp) + return resp + class _GetOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("GetOrder") @@ -525,6 +681,100 @@ def __call__( resp = self._interceptor.post_list_orders(resp) return resp + class _ModifyOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("ModifyOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.ModifyOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the modify order method over HTTP. + + Args: + request (~.procurement_service.ModifyOrderRequest): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:modify", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_modify_order(request, metadata) + pb_request = procurement_service.ModifyOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_modify_order(resp) + return resp + class _PlaceOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("PlaceOrder") @@ -619,6 +869,14 @@ def __call__( resp = self._interceptor.post_place_order(resp) return resp + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelOrder(self._session, self._host, self._interceptor) # type: ignore + @property def get_order(self) -> Callable[[procurement_service.GetOrderRequest], order.Order]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -635,6 +893,14 @@ def list_orders( # In C++ this would require a dynamic_cast return self._ListOrders(self._session, self._host, self._interceptor) # type: ignore + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ModifyOrder(self._session, self._host, self._interceptor) # type: ignore + @property def place_order( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py new file mode 100644 index 000000000000..d1b19f8f83bc --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import LicenseManagementServiceAsyncClient +from .client import LicenseManagementServiceClient + +__all__ = ( + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", +) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py new file mode 100644 index 000000000000..015a76c88aef --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py @@ -0,0 +1,925 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
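+#
+# A minimal, hedged usage sketch for the client defined in this new module
+# (the resource IDs below are placeholders, not values taken from this change;
+# the package-level export is shown in the updated __init__.py above):
+#
+#   from google.cloud import commerce_consumer_procurement_v1
+#
+#   async def show_license_pool():
+#       client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient()
+#       pool = await client.get_license_pool(
+#           name="billingAccounts/123456/orders/987654/licensePool"
+#       )
+#       print(pool.name)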
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + pagers, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .client import LicenseManagementServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport + + +class LicenseManagementServiceAsyncClient: + """Service for managing licenses.""" + + _client: LicenseManagementServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = LicenseManagementServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = LicenseManagementServiceClient._DEFAULT_UNIVERSE + + license_pool_path = staticmethod(LicenseManagementServiceClient.license_pool_path) + parse_license_pool_path = staticmethod( + LicenseManagementServiceClient.parse_license_pool_path + ) + common_billing_account_path = staticmethod( + LicenseManagementServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + LicenseManagementServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(LicenseManagementServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + LicenseManagementServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + LicenseManagementServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + LicenseManagementServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + LicenseManagementServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + LicenseManagementServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + LicenseManagementServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + LicenseManagementServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. + """ + return LicenseManagementServiceClient.from_service_account_info.__func__(LicenseManagementServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. + """ + return LicenseManagementServiceClient.from_service_account_file.__func__(LicenseManagementServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LicenseManagementServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LicenseManagementServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LicenseManagementServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = LicenseManagementServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + LicenseManagementServiceTransport, + Callable[..., LicenseManagementServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license management service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,LicenseManagementServiceTransport,Callable[..., LicenseManagementServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseManagementServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LicenseManagementServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_license_pool( + self, + request: Optional[ + Union[license_management_service.GetLicensePoolRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Gets the license pool. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest, dict]]): + The request object. Request message for getting a license + pool. + name (:class:`str`): + Required. The name of the license pool to get. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.GetLicensePoolRequest): + request = license_management_service.GetLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
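+        # Note: the routing header added above is sent as the
+        # ``x-goog-request-params`` request metadata entry (roughly
+        # ``("x-goog-request-params", "name=<request.name>")``), which lets the
+        # backend route the call to the correct resource.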
+ return response + + async def update_license_pool( + self, + request: Optional[ + Union[license_management_service.UpdateLicensePoolRequest, dict] + ] = None, + *, + license_pool: Optional[license_management_service.LicensePool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Updates the license pool if one exists for this + Order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = await client.update_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest, dict]]): + The request object. Request message for updating a + license pool. + license_pool (:class:`google.cloud.commerce_consumer_procurement_v1.types.LicensePool`): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + + This corresponds to the ``license_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([license_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UpdateLicensePoolRequest): + request = license_management_service.UpdateLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
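+        # Standard FieldMask semantics apply: only the LicensePool fields named
+        # in ``update_mask.paths`` are written. Illustrative mask (the field
+        # name below is a placeholder, not taken from the generated types):
+        #
+        #     update_mask=field_mask_pb2.FieldMask(paths=["<field_to_update>"])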
+ if license_pool is not None: + request.license_pool = license_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("license_pool.name", request.license_pool.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def assign( + self, + request: Optional[Union[license_management_service.AssignRequest, dict]] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.AssignResponse: + r"""Assigns a license to a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.assign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.AssignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
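+        # In other words: pass either a fully formed AssignRequest (or dict)
+        # via ``request``, or the flattened ``parent``/``usernames`` arguments,
+        # but not both. Illustrative call (values are placeholders):
+        #
+        #     await client.assign(
+        #         parent="billingAccounts/BA-1234/orders/my-order/licensePool",
+        #         usernames=["alice@example.com"],
+        #     )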
+ has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.AssignRequest): + request = license_management_service.AssignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.assign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def unassign( + self, + request: Optional[ + Union[license_management_service.UnassignRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Unassigns a license from a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.unassign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UnassignRequest): + request = license_management_service.UnassignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.unassign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def enumerate_licensed_users( + self, + request: Optional[ + Union[license_management_service.EnumerateLicensedUsersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.EnumerateLicensedUsersAsyncPager: + r"""Enumerates all users assigned a license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest, dict]]): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. 
+ parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersAsyncPager: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, license_management_service.EnumerateLicensedUsersRequest + ): + request = license_management_service.EnumerateLicensedUsersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.enumerate_licensed_users + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.EnumerateLicensedUsersAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "LicenseManagementServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LicenseManagementServiceAsyncClient",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py new file mode 100644 index 000000000000..5c8aee6b83e5 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py @@ -0,0 +1,1339 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
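+#
+# The synchronous LicenseManagementServiceClient defined in this module mirrors
+# the LicenseManagementServiceAsyncClient above. A minimal construction sketch
+# (illustrative only; the key-file path is a placeholder):
+#
+#     client = LicenseManagementServiceClient.from_service_account_file(
+#         "service-account.json"
+#     )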
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Callable,
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.commerce_consumer_procurement_v1 import (
+    gapic_version as package_version,
+)
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+
+from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import (
+    pagers,
+)
+from google.cloud.commerce_consumer_procurement_v1.types import (
+    license_management_service,
+)
+
+from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport
+from .transports.grpc import LicenseManagementServiceGrpcTransport
+from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport
+from .transports.rest import LicenseManagementServiceRestTransport
+
+
+class LicenseManagementServiceClientMeta(type):
+    """Metaclass for the LicenseManagementService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[LicenseManagementServiceTransport]]
+    _transport_registry["grpc"] = LicenseManagementServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = LicenseManagementServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = LicenseManagementServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[LicenseManagementServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LicenseManagementServiceClient(metaclass=LicenseManagementServiceClientMeta):
+    """Service for managing licenses."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "cloudcommerceconsumerprocurement.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "cloudcommerceconsumerprocurement.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LicenseManagementServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LicenseManagementServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def license_pool_path(
+        billing_account: str,
+        order: str,
+    ) -> str:
+        """Returns a fully-qualified license_pool string."""
+        return "billingAccounts/{billing_account}/orders/{order}/licensePool".format(
+            billing_account=billing_account,
+            order=order,
+        )
+
+    @staticmethod
+    def parse_license_pool_path(path: str) -> Dict[str, str]:
+        """Parses a license_pool path into its component segments."""
+        m = re.match(
+            r"^billingAccounts/(?P<billing_account>.+?)/orders/(?P<order>.+?)/licensePool$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = LicenseManagementServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
+ """ + + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or LicenseManagementServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + LicenseManagementServiceTransport, + Callable[..., LicenseManagementServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license management service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,LicenseManagementServiceTransport,Callable[..., LicenseManagementServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseManagementServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = LicenseManagementServiceClient._read_environment_variables() + self._client_cert_source = ( + LicenseManagementServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = LicenseManagementServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, LicenseManagementServiceTransport) + if transport_provided: + # transport is a LicenseManagementServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(LicenseManagementServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or LicenseManagementServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[LicenseManagementServiceTransport], + Callable[..., LicenseManagementServiceTransport], + ] = ( + LicenseManagementServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LicenseManagementServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_license_pool( + self, + request: Optional[ + Union[license_management_service.GetLicensePoolRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Gets the license pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest, dict]): + The request object. Request message for getting a license + pool. + name (str): + Required. The name of the license pool to get. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.GetLicensePoolRequest): + request = license_management_service.GetLicensePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_license_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_license_pool( + self, + request: Optional[ + Union[license_management_service.UpdateLicensePoolRequest, dict] + ] = None, + *, + license_pool: Optional[license_management_service.LicensePool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Updates the license pool if one exists for this + Order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = client.update_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest, dict]): + The request object. Request message for updating a + license pool. + license_pool (google.cloud.commerce_consumer_procurement_v1.types.LicensePool): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + + This corresponds to the ``license_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([license_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UpdateLicensePoolRequest): + request = license_management_service.UpdateLicensePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if license_pool is not None: + request.license_pool = license_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_license_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("license_pool.name", request.license_pool.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def assign( + self, + request: Optional[Union[license_management_service.AssignRequest, dict]] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.AssignResponse: + r"""Assigns a license to a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.assign(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.AssignRequest, dict]): + The request object. Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + parent (str): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.AssignRequest): + request = license_management_service.AssignRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames is not None: + request.usernames = usernames + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.assign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
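+        # Caller-side sketch (illustrative only; the identifiers below are
+        # placeholders). The ``parent`` resource name can be built with the
+        # path helper defined on this class:
+        #
+        #     parent = LicenseManagementServiceClient.license_pool_path(
+        #         billing_account="BA-1234", order="my-order"
+        #     )
+        #     response = client.assign(parent=parent, usernames=["alice@example.com"])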
+ return response + + def unassign( + self, + request: Optional[ + Union[license_management_service.UnassignRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Unassigns a license from a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.unassign(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest, dict]): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + parent (str): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UnassignRequest): + request = license_management_service.UnassignRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames is not None: + request.usernames = usernames + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
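+        # Note (illustrative): when ``retry`` or ``timeout`` is passed explicitly
+        # to this call, it takes precedence over the defaults configured on the
+        # wrapped method.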
+ rpc = self._transport._wrapped_methods[self._transport.unassign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enumerate_licensed_users( + self, + request: Optional[ + Union[license_management_service.EnumerateLicensedUsersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.EnumerateLicensedUsersPager: + r"""Enumerates all users assigned a license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest, dict]): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + parent (str): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersPager: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
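+        # A plain dict is also accepted here; the proto-plus constructor maps
+        # its keys onto message fields, e.g. ``{"parent": "parent_value"}``.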
+ if not isinstance( + request, license_management_service.EnumerateLicensedUsersRequest + ): + request = license_management_service.EnumerateLicensedUsersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enumerate_licensed_users] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.EnumerateLicensedUsersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LicenseManagementServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LicenseManagementServiceClient",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py new file mode 100644 index 000000000000..c2d5464940a4 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py @@ -0,0 +1,207 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + + +class EnumerateLicensedUsersPager: + """A pager for iterating through ``enumerate_licensed_users`` requests. + + This class thinly wraps an initial + :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``licensed_users`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``EnumerateLicensedUsers`` requests and continue to iterate + through the ``licensed_users`` field on the + corresponding responses. + + All the usual :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., license_management_service.EnumerateLicensedUsersResponse + ], + request: license_management_service.EnumerateLicensedUsersRequest, + response: license_management_service.EnumerateLicensedUsersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest): + The initial request object. + response (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = license_management_service.EnumerateLicensedUsersRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[license_management_service.EnumerateLicensedUsersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[license_management_service.LicensedUser]: + for page in self.pages: + yield from page.licensed_users + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class EnumerateLicensedUsersAsyncPager: + """A pager for iterating through ``enumerate_licensed_users`` requests. + + This class thinly wraps an initial + :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``licensed_users`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``EnumerateLicensedUsers`` requests and continue to iterate + through the ``licensed_users`` field on the + corresponding responses. + + All the usual :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[license_management_service.EnumerateLicensedUsersResponse] + ], + request: license_management_service.EnumerateLicensedUsersRequest, + response: license_management_service.EnumerateLicensedUsersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest): + The initial request object. + response (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
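+
+        Example (illustrative; ``client`` is assumed to be an instantiated
+        ``LicenseManagementServiceAsyncClient``, and the pager is normally
+        obtained from its ``enumerate_licensed_users`` method rather than
+        constructed directly):
+
+        .. code-block:: python
+
+            pager = await client.enumerate_licensed_users(request=request)
+            async for licensed_user in pager:
+                print(licensed_user)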
+ """ + self._method = method + self._request = license_management_service.EnumerateLicensedUsersRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[license_management_service.EnumerateLicensedUsersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[license_management_service.LicensedUser]: + async def async_generator(): + async for page in self.pages: + for response in page.licensed_users: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py new file mode 100644 index 000000000000..cdaddef08466 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LicenseManagementServiceTransport +from .grpc import LicenseManagementServiceGrpcTransport +from .grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport +from .rest import ( + LicenseManagementServiceRestInterceptor, + LicenseManagementServiceRestTransport, +) + +# Compile a registry of transports. 
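+# The registry maps a transport name (for example ``"grpc"`` or ``"rest"``)
+# to its concrete transport class, e.g. ``_transport_registry["rest"]``.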
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[LicenseManagementServiceTransport]] +_transport_registry["grpc"] = LicenseManagementServiceGrpcTransport +_transport_registry["grpc_asyncio"] = LicenseManagementServiceGrpcAsyncIOTransport +_transport_registry["rest"] = LicenseManagementServiceRestTransport + +__all__ = ( + "LicenseManagementServiceTransport", + "LicenseManagementServiceGrpcTransport", + "LicenseManagementServiceGrpcAsyncIOTransport", + "LicenseManagementServiceRestTransport", + "LicenseManagementServiceRestInterceptor", +) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py new file mode 100644 index 000000000000..416fb9788d4c --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class LicenseManagementServiceTransport(abc.ABC): + """Abstract transport class for LicenseManagementService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudcommerceconsumerprocurement.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_license_pool: gapic_v1.method.wrap_method( + self.get_license_pool, + default_timeout=None, + client_info=client_info, + ), + self.update_license_pool: gapic_v1.method.wrap_method( + self.update_license_pool, + default_timeout=None, + client_info=client_info, + ), + self.assign: gapic_v1.method.wrap_method( + self.assign, + default_timeout=None, + client_info=client_info, + ), + self.unassign: gapic_v1.method.wrap_method( + self.unassign, + default_timeout=None, + client_info=client_info, + ), + self.enumerate_licensed_users: gapic_v1.method.wrap_method( + self.enumerate_licensed_users, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
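+
+        A common alternative (illustrative) is to use the client object itself
+        as a context manager, which closes its own transport on exit::
+
+            with LicenseManagementServiceClient() as client:
+                ...  # make calls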
+ """ + raise NotImplementedError() + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + Union[ + license_management_service.LicensePool, + Awaitable[license_management_service.LicensePool], + ], + ]: + raise NotImplementedError() + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + Union[ + license_management_service.LicensePool, + Awaitable[license_management_service.LicensePool], + ], + ]: + raise NotImplementedError() + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + Union[ + license_management_service.AssignResponse, + Awaitable[license_management_service.AssignResponse], + ], + ]: + raise NotImplementedError() + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + Union[ + license_management_service.UnassignResponse, + Awaitable[license_management_service.UnassignResponse], + ], + ]: + raise NotImplementedError() + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + Union[ + license_management_service.EnumerateLicensedUsersResponse, + Awaitable[license_management_service.EnumerateLicensedUsersResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("LicenseManagementServiceTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py new file mode 100644 index 000000000000..ff5a937a6e70 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport + + +class LicenseManagementServiceGrpcTransport(LicenseManagementServiceTransport): + """gRPC backend transport for LicenseManagementService. + + Service for managing licenses. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + license_management_service.LicensePool, + ]: + r"""Return a callable for the get license pool method over gRPC. + + Gets the license pool. + + Returns: + Callable[[~.GetLicensePoolRequest], + ~.LicensePool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_license_pool" not in self._stubs: + self._stubs["get_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/GetLicensePool", + request_serializer=license_management_service.GetLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["get_license_pool"] + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + license_management_service.LicensePool, + ]: + r"""Return a callable for the update license pool method over gRPC. + + Updates the license pool if one exists for this + Order. + + Returns: + Callable[[~.UpdateLicensePoolRequest], + ~.LicensePool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_license_pool" not in self._stubs: + self._stubs["update_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/UpdateLicensePool", + request_serializer=license_management_service.UpdateLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["update_license_pool"] + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + license_management_service.AssignResponse, + ]: + r"""Return a callable for the assign method over gRPC. + + Assigns a license to a user. + + Returns: + Callable[[~.AssignRequest], + ~.AssignResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "assign" not in self._stubs: + self._stubs["assign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Assign", + request_serializer=license_management_service.AssignRequest.serialize, + response_deserializer=license_management_service.AssignResponse.deserialize, + ) + return self._stubs["assign"] + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + license_management_service.UnassignResponse, + ]: + r"""Return a callable for the unassign method over gRPC. + + Unassigns a license from a user. + + Returns: + Callable[[~.UnassignRequest], + ~.UnassignResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "unassign" not in self._stubs: + self._stubs["unassign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Unassign", + request_serializer=license_management_service.UnassignRequest.serialize, + response_deserializer=license_management_service.UnassignResponse.deserialize, + ) + return self._stubs["unassign"] + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + license_management_service.EnumerateLicensedUsersResponse, + ]: + r"""Return a callable for the enumerate licensed users method over gRPC. + + Enumerates all users assigned a license. + + Returns: + Callable[[~.EnumerateLicensedUsersRequest], + ~.EnumerateLicensedUsersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
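+        # The stub is created at most once per transport instance and cached
+        # in ``self._stubs``; later property accesses reuse the same callable.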
+ if "enumerate_licensed_users" not in self._stubs: + self._stubs["enumerate_licensed_users"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/EnumerateLicensedUsers", + request_serializer=license_management_service.EnumerateLicensedUsersRequest.serialize, + response_deserializer=license_management_service.EnumerateLicensedUsersResponse.deserialize, + ) + return self._stubs["enumerate_licensed_users"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("LicenseManagementServiceGrpcTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..77f1f9a02681 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .grpc import LicenseManagementServiceGrpcTransport + + +class LicenseManagementServiceGrpcAsyncIOTransport(LicenseManagementServiceTransport): + """gRPC AsyncIO backend transport for LicenseManagementService. + + Service for managing licenses. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. 
These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + Awaitable[license_management_service.LicensePool], + ]: + r"""Return a callable for the get license pool method over gRPC. + + Gets the license pool. + + Returns: + Callable[[~.GetLicensePoolRequest], + Awaitable[~.LicensePool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_license_pool" not in self._stubs: + self._stubs["get_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/GetLicensePool", + request_serializer=license_management_service.GetLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["get_license_pool"] + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + Awaitable[license_management_service.LicensePool], + ]: + r"""Return a callable for the update license pool method over gRPC. + + Updates the license pool if one exists for this + Order. + + Returns: + Callable[[~.UpdateLicensePoolRequest], + Awaitable[~.LicensePool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_license_pool" not in self._stubs: + self._stubs["update_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/UpdateLicensePool", + request_serializer=license_management_service.UpdateLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["update_license_pool"] + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + Awaitable[license_management_service.AssignResponse], + ]: + r"""Return a callable for the assign method over gRPC. + + Assigns a license to a user. + + Returns: + Callable[[~.AssignRequest], + Awaitable[~.AssignResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "assign" not in self._stubs: + self._stubs["assign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Assign", + request_serializer=license_management_service.AssignRequest.serialize, + response_deserializer=license_management_service.AssignResponse.deserialize, + ) + return self._stubs["assign"] + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + Awaitable[license_management_service.UnassignResponse], + ]: + r"""Return a callable for the unassign method over gRPC. + + Unassigns a license from a user. + + Returns: + Callable[[~.UnassignRequest], + Awaitable[~.UnassignResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "unassign" not in self._stubs: + self._stubs["unassign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Unassign", + request_serializer=license_management_service.UnassignRequest.serialize, + response_deserializer=license_management_service.UnassignResponse.deserialize, + ) + return self._stubs["unassign"] + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + Awaitable[license_management_service.EnumerateLicensedUsersResponse], + ]: + r"""Return a callable for the enumerate licensed users method over gRPC. + + Enumerates all users assigned a license. + + Returns: + Callable[[~.EnumerateLicensedUsersRequest], + Awaitable[~.EnumerateLicensedUsersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "enumerate_licensed_users" not in self._stubs: + self._stubs["enumerate_licensed_users"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/EnumerateLicensedUsers", + request_serializer=license_management_service.EnumerateLicensedUsersRequest.serialize, + response_deserializer=license_management_service.EnumerateLicensedUsersResponse.deserialize, + ) + return self._stubs["enumerate_licensed_users"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_license_pool: gapic_v1.method_async.wrap_method( + self.get_license_pool, + default_timeout=None, + client_info=client_info, + ), + self.update_license_pool: gapic_v1.method_async.wrap_method( + self.update_license_pool, + default_timeout=None, + client_info=client_info, + ), + self.assign: gapic_v1.method_async.wrap_method( + self.assign, + default_timeout=None, + client_info=client_info, + ), + self.unassign: gapic_v1.method_async.wrap_method( + self.unassign, + default_timeout=None, + client_info=client_info, + ), + self.enumerate_licensed_users: gapic_v1.method_async.wrap_method( + self.enumerate_licensed_users, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + +__all__ = ("LicenseManagementServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py new file mode 100644 index 000000000000..e2e5093c1b3a --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py @@ -0,0 +1,958 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import LicenseManagementServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LicenseManagementServiceRestInterceptor: + """Interceptor for LicenseManagementService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LicenseManagementServiceRestTransport. + + .. 
code-block:: python + class MyCustomLicenseManagementServiceInterceptor(LicenseManagementServiceRestInterceptor): + def pre_assign(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_assign(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enumerate_licensed_users(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enumerate_licensed_users(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_license_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_license_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_unassign(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_unassign(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_license_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_license_pool(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LicenseManagementServiceRestTransport(interceptor=MyCustomLicenseManagementServiceInterceptor()) + client = LicenseManagementServiceClient(transport=transport) + + + """ + + def pre_assign( + self, + request: license_management_service.AssignRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[license_management_service.AssignRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for assign + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_assign( + self, response: license_management_service.AssignResponse + ) -> license_management_service.AssignResponse: + """Post-rpc interceptor for assign + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_enumerate_licensed_users( + self, + request: license_management_service.EnumerateLicensedUsersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.EnumerateLicensedUsersRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for enumerate_licensed_users + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_enumerate_licensed_users( + self, response: license_management_service.EnumerateLicensedUsersResponse + ) -> license_management_service.EnumerateLicensedUsersResponse: + """Post-rpc interceptor for enumerate_licensed_users + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_get_license_pool( + self, + request: license_management_service.GetLicensePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.GetLicensePoolRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_license_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. 
+ """ + return request, metadata + + def post_get_license_pool( + self, response: license_management_service.LicensePool + ) -> license_management_service.LicensePool: + """Post-rpc interceptor for get_license_pool + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_unassign( + self, + request: license_management_service.UnassignRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[license_management_service.UnassignRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for unassign + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_unassign( + self, response: license_management_service.UnassignResponse + ) -> license_management_service.UnassignResponse: + """Post-rpc interceptor for unassign + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_update_license_pool( + self, + request: license_management_service.UpdateLicensePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.UpdateLicensePoolRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_license_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_update_license_pool( + self, response: license_management_service.LicensePool + ) -> license_management_service.LicensePool: + """Post-rpc interceptor for update_license_pool + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LicenseManagementServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LicenseManagementServiceRestInterceptor + + +class LicenseManagementServiceRestTransport(LicenseManagementServiceTransport): + """REST backend transport for LicenseManagementService. + + Service for managing licenses. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[LicenseManagementServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or LicenseManagementServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Assign(LicenseManagementServiceRestStub):
+        def __hash__(self):
+            return hash("Assign")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: license_management_service.AssignRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> license_management_service.AssignResponse:
+            r"""Call the assign method over HTTP.
+
+            Args:
+                request (~.license_management_service.AssignRequest):
+                    The request object. Request message for
+                    [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.license_management_service.AssignResponse:
+                    Response message for
+                    [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign].
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:assign",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_assign(request, metadata)
+            pb_request = license_management_service.AssignRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.AssignResponse() + pb_resp = license_management_service.AssignResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_assign(resp) + return resp + + class _EnumerateLicensedUsers(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("EnumerateLicensedUsers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.EnumerateLicensedUsersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.EnumerateLicensedUsersResponse: + r"""Call the enumerate licensed users method over HTTP. + + Args: + request (~.license_management_service.EnumerateLicensedUsersRequest): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.EnumerateLicensedUsersResponse: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:enumerateLicensedUsers", + }, + ] + request, metadata = self._interceptor.pre_enumerate_licensed_users( + request, metadata + ) + pb_request = license_management_service.EnumerateLicensedUsersRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.EnumerateLicensedUsersResponse() + pb_resp = license_management_service.EnumerateLicensedUsersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enumerate_licensed_users(resp) + return resp + + class _GetLicensePool(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("GetLicensePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.GetLicensePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Call the get license pool method over HTTP. + + Args: + request (~.license_management_service.GetLicensePoolRequest): + The request object. Request message for getting a license + pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=billingAccounts/*/orders/*/licensePool}", + }, + ] + request, metadata = self._interceptor.pre_get_license_pool( + request, metadata + ) + pb_request = license_management_service.GetLicensePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.LicensePool() + pb_resp = license_management_service.LicensePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_license_pool(resp) + return resp + + class _Unassign(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("Unassign") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.UnassignRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Call the unassign method over HTTP. + + Args: + request (~.license_management_service.UnassignRequest): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:unassign", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_unassign(request, metadata) + pb_request = license_management_service.UnassignRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.UnassignResponse() + pb_resp = license_management_service.UnassignResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_unassign(resp) + return resp + + class _UpdateLicensePool(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("UpdateLicensePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.UpdateLicensePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Call the update license pool method over HTTP. + + Args: + request (~.license_management_service.UpdateLicensePoolRequest): + The request object. Request message for updating a + license pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{license_pool.name=billingAccounts/*/orders/*/licensePool/*}", + "body": "license_pool", + }, + ] + request, metadata = self._interceptor.pre_update_license_pool( + request, metadata + ) + pb_request = license_management_service.UpdateLicensePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.LicensePool() + pb_resp = license_management_service.LicensePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_license_pool(resp) + return resp + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + license_management_service.AssignResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Assign(self._session, self._host, self._interceptor) # type: ignore + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + license_management_service.EnumerateLicensedUsersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnumerateLicensedUsers(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + license_management_service.LicensePool, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetLicensePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + license_management_service.UnassignResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Unassign(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + license_management_service.LicensePool, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateLicensePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(LicenseManagementServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=billingAccounts/*/orders/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("LicenseManagementServiceRestTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py index cc4943418819..4e35259c31e1 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py @@ -13,6 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from .order import ( LineItem, LineItemChange, @@ -25,14 +38,30 @@ Subscription, ) from .procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) __all__ = ( + "AssignmentProtocol", + "AssignRequest", + "AssignResponse", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", + "LicensedUser", + "LicensePool", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", "LineItem", "LineItemChange", "LineItemInfo", @@ -42,9 +71,14 @@ "LineItemChangeState", "LineItemChangeStateReasonType", "LineItemChangeType", + "CancelOrderMetadata", + "CancelOrderRequest", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "PlaceOrderMetadata", "PlaceOrderRequest", + "AutoRenewalBehavior", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py new file mode 100644 index 000000000000..4b13230f4ea8 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.commerce.consumer.procurement.v1", + manifest={ + "AssignmentProtocol", + "LicensePool", + "GetLicensePoolRequest", + "UpdateLicensePoolRequest", + "AssignRequest", + "AssignResponse", + "UnassignRequest", + "UnassignResponse", + "EnumerateLicensedUsersRequest", + "LicensedUser", + "EnumerateLicensedUsersResponse", + }, +) + + +class AssignmentProtocol(proto.Message): + r"""Assignment protocol for a license pool. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manual_assignment_type (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol.ManualAssignmentType): + Allow manual assignments triggered by + administrative operations only. + + This field is a member of `oneof`_ ``assignment_type``. + auto_assignment_type (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol.AutoAssignmentType): + Allow automatic assignments triggered by data + plane operations. + + This field is a member of `oneof`_ ``assignment_type``. + """ + + class ManualAssignmentType(proto.Message): + r"""Allow manual assignments triggered by administrative + operations only. + + """ + + class AutoAssignmentType(proto.Message): + r"""Configuration for automatic assignments handled by data plane + operations. + + Attributes: + inactive_license_ttl (google.protobuf.duration_pb2.Duration): + Optional. The time to live for an inactive + license. After this time has passed, the license + will be automatically unassigned from the user. + Must be at least 7 days, if set. If unset, the + license will never expire. + """ + + inactive_license_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + manual_assignment_type: ManualAssignmentType = proto.Field( + proto.MESSAGE, + number=2, + oneof="assignment_type", + message=ManualAssignmentType, + ) + auto_assignment_type: AutoAssignmentType = proto.Field( + proto.MESSAGE, + number=3, + oneof="assignment_type", + message=AutoAssignmentType, + ) + + +class LicensePool(proto.Message): + r"""A license pool represents a pool of licenses that can be + assigned to users. + + Attributes: + name (str): + Identifier. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + license_assignment_protocol (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol): + Required. Assignment protocol for the license + pool. + available_license_count (int): + Output only. Licenses count that are + available to be assigned. + total_license_count (int): + Output only. Total number of licenses in the + pool. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + license_assignment_protocol: "AssignmentProtocol" = proto.Field( + proto.MESSAGE, + number=2, + message="AssignmentProtocol", + ) + available_license_count: int = proto.Field( + proto.INT32, + number=3, + ) + total_license_count: int = proto.Field( + proto.INT32, + number=4, + ) + + +class GetLicensePoolRequest(proto.Message): + r"""Request message for getting a license pool. + + Attributes: + name (str): + Required. The name of the license pool to get. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateLicensePoolRequest(proto.Message): + r"""Request message for updating a license pool. + + Attributes: + license_pool (google.cloud.commerce_consumer_procurement_v1.types.LicensePool): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. 
+ """ + + license_pool: "LicensePool" = proto.Field( + proto.MESSAGE, + number=1, + message="LicensePool", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class AssignRequest(proto.Message): + r"""Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + Attributes: + parent (str): + Required. License pool name. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + usernames: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class AssignResponse(proto.Message): + r"""Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + + +class UnassignRequest(proto.Message): + r"""Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + Attributes: + parent (str): + Required. License pool name. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + usernames: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class UnassignResponse(proto.Message): + r"""Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + + +class EnumerateLicensedUsersRequest(proto.Message): + r"""Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Attributes: + parent (str): + Required. License pool name. + page_size (int): + Optional. The maximum number of users to + return. The service may return fewer than this + value. + page_token (str): + Optional. A page token, received from a previous + ``EnumerateLicensedUsers`` call. Provide this to retrieve + the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LicensedUser(proto.Message): + r"""A licensed user. + + Attributes: + username (str): + Username. Format: ``name@domain.com``. + assign_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the license was + assigned. + recent_usage_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the license was + recently used. This may not be the most recent + usage time, and will be updated regularly + (within 24 hours). + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + assign_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + recent_usage_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class EnumerateLicensedUsersResponse(proto.Message): + r"""Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. 
+ + Attributes: + licensed_users (MutableSequence[google.cloud.commerce_consumer_procurement_v1.types.LicensedUser]): + The list of licensed users. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + licensed_users: MutableSequence["LicensedUser"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LicensedUser", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py index 70bce4301daf..f22c139cd202 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py @@ -68,7 +68,8 @@ class LineItemChangeState(proto.Enum): Sentinel value. Do not use. LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL (1): Change is in this state when a change is - initiated and waiting for partner approval. + initiated and waiting for partner approval. This + state is only applicable for pending change. LINE_ITEM_CHANGE_STATE_APPROVED (2): Change is in this state after it's approved by the partner or auto-approved but before it @@ -76,21 +77,26 @@ class LineItemChangeState(proto.Enum): cancelled depending on the new line item info property (pending Private Offer change cannot be cancelled and can only be overwritten by another - Private Offer). + Private Offer). This state is only applicable + for pending change. LINE_ITEM_CHANGE_STATE_COMPLETED (3): Change is in this state after it's been - activated. + activated. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_REJECTED (4): Change is in this state if it was rejected by - the partner. + the partner. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_ABANDONED (5): Change is in this state if it was abandoned - by the user. + by the user. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_ACTIVATING (6): Change is in this state if it's currently being provisioned downstream. The change can't be overwritten or cancelled when it's in this - state. + state. This state is only applicable for pending + change. 
""" LINE_ITEM_CHANGE_STATE_UNSPECIFIED = 0 LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL = 1 diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py index 3a9de30378b8..93c2b0b600ab 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py @@ -24,15 +24,39 @@ __protobuf__ = proto.module( package="google.cloud.commerce.consumer.procurement.v1", manifest={ + "AutoRenewalBehavior", "PlaceOrderRequest", "PlaceOrderMetadata", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderRequest", + "ModifyOrderMetadata", + "CancelOrderRequest", + "CancelOrderMetadata", }, ) +class AutoRenewalBehavior(proto.Enum): + r"""Indicates the auto renewal behavior customer specifies on + subscription. + + Values: + AUTO_RENEWAL_BEHAVIOR_UNSPECIFIED (0): + If unspecified, the auto renewal behavior + will follow the default config. + AUTO_RENEWAL_BEHAVIOR_ENABLE (1): + Auto Renewal will be enabled on subscription. + AUTO_RENEWAL_BEHAVIOR_DISABLE (2): + Auto Renewal will be disabled on + subscription. + """ + AUTO_RENEWAL_BEHAVIOR_UNSPECIFIED = 0 + AUTO_RENEWAL_BEHAVIOR_ENABLE = 1 + AUTO_RENEWAL_BEHAVIOR_DISABLE = 2 + + class PlaceOrderRequest(proto.Message): r"""Request message for [ConsumerProcurementService.PlaceOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder]. @@ -50,7 +74,7 @@ class PlaceOrderRequest(proto.Message): request_id (str): Optional. A unique identifier for this request. The server will ignore subsequent requests that provide a duplicate - request ID for at least 120 minutes after the first request. + request ID for at least 24 hours after the first request. The request ID must be a valid `UUID `__. @@ -176,4 +200,164 @@ def raw_page(self): ) +class ModifyOrderRequest(proto.Message): + r"""Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + + Attributes: + name (str): + Required. Name of the order to update. + modifications (MutableSequence[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest.Modification]): + Optional. Modifications for an existing Order + created by an Offer. Required when Offer based + Order is being modified, except for when going + from an offer to a public plan. + display_name (str): + Optional. Updated display name of the order, + leave as empty if you do not want to update + current display name. + etag (str): + Optional. The weak etag, which can be + optionally populated, of the order that this + modify request is based on. Validation checking + will only happen if the invoker supplies this + field. + """ + + class Modification(proto.Message): + r"""Modifications to make on the order. + + Attributes: + line_item_id (str): + Required. ID of the existing line item to make change to. + Required when change type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE] or + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_CANCEL]. + change_type (google.cloud.commerce_consumer_procurement_v1.types.LineItemChangeType): + Required. Type of change to make. 
+ new_line_item_info (google.cloud.commerce_consumer_procurement_v1.types.LineItemInfo): + Optional. The line item to update to. Required when + change_type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_CREATE] or + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE]. + auto_renewal_behavior (google.cloud.commerce_consumer_procurement_v1.types.AutoRenewalBehavior): + Optional. Auto renewal behavior of the subscription for the + update. Applied when change_type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE]. Follows + plan default config when this field is not specified. + """ + + line_item_id: str = proto.Field( + proto.STRING, + number=1, + ) + change_type: order.LineItemChangeType = proto.Field( + proto.ENUM, + number=2, + enum=order.LineItemChangeType, + ) + new_line_item_info: order.LineItemInfo = proto.Field( + proto.MESSAGE, + number=3, + message=order.LineItemInfo, + ) + auto_renewal_behavior: "AutoRenewalBehavior" = proto.Field( + proto.ENUM, + number=4, + enum="AutoRenewalBehavior", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + modifications: MutableSequence[Modification] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=Modification, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ModifyOrderMetadata(proto.Message): + r"""Message stored in the metadata field of the Operation returned by + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + + """ + + +class CancelOrderRequest(proto.Message): + r"""Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + + Attributes: + name (str): + Required. The resource name of the order. + etag (str): + Optional. The weak etag, which can be + optionally populated, of the order that this + cancel request is based on. Validation checking + will only happen if the invoker supplies this + field. + cancellation_policy (google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest.CancellationPolicy): + Optional. Cancellation policy of this + request. + """ + + class CancellationPolicy(proto.Enum): + r"""Indicates the cancellation policy the customer uses to cancel + the order. + + Values: + CANCELLATION_POLICY_UNSPECIFIED (0): + If unspecified, cancellation will try to + cancel the order, if order cannot be immediately + cancelled, auto renewal will be turned off. + However, caller should avoid using the value as + it will yield a non-deterministic result. This + is still supported mainly to maintain existing + integrated usages and ensure backwards + compatibility. + CANCELLATION_POLICY_CANCEL_IMMEDIATELY (1): + Request will cancel the whole order + immediately, if order cannot be immediately + cancelled, the request will fail. + CANCELLATION_POLICY_CANCEL_AT_TERM_END (2): + Request will cancel the auto renewal, if + order is not subscription based, the request + will fail. 
+ """ + CANCELLATION_POLICY_UNSPECIFIED = 0 + CANCELLATION_POLICY_CANCEL_IMMEDIATELY = 1 + CANCELLATION_POLICY_CANCEL_AT_TERM_END = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + cancellation_policy: CancellationPolicy = proto.Field( + proto.ENUM, + number=3, + enum=CancellationPolicy, + ) + + +class CancelOrderMetadata(proto.Message): + r"""Message stored in the metadata field of the Operation returned by + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py new file mode 100644 index 000000000000..6af6267a3dfe --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py new file mode 100644 index 000000000000..d107e654d209 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
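+# - Per the CancelOrderRequest message defined earlier in this change, a weak
+#   etag of the order being cancelled may optionally be supplied; validation
+#   against the order version only happens when this field is set. Leaving
+#   cancellation_policy unspecified falls back to the non-deterministic legacy
+#   behavior described in the CancellationPolicy enum.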
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py new file mode 100644 index 000000000000..d7995349fbc1 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
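+# - The request below sets only the required "name". Per the
+#   ModifyOrderRequest.Modification message defined earlier in this change, a
+#   typical call also adds one modification per affected line item, for example
+#   (illustrative values; assumes LineItemInfo is re-exported at the package
+#   level like the request messages):
+#     request.modifications.append(
+#         commerce_consumer_procurement_v1.ModifyOrderRequest.Modification(
+#             line_item_id="line_item_id_value",
+#             change_type="LINE_ITEM_CHANGE_TYPE_UPDATE",
+#             new_line_item_info=commerce_consumer_procurement_v1.LineItemInfo(),
+#         )
+#     )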
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py new file mode 100644 index 000000000000..7d2280e2367b --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
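+# - Per the Modification message defined earlier in this change,
+#   auto_renewal_behavior is applied only when change_type is
+#   LINE_ITEM_CHANGE_TYPE_UPDATE; when the field is omitted, the plan's
+#   default auto-renewal configuration is used.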
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py new file mode 100644 index 000000000000..2c5e4b37f6e4 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Assign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.assign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py new file mode 100644 index 000000000000..ec7ea6ea12bf --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Assign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.assign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py new file mode 100644 index 000000000000..735a8c855ace --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnumerateLicensedUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
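+# - enumerate_licensed_users returns an EnumerateLicensedUsersAsyncPager;
+#   iterating it as shown below is expected to fetch further result pages on
+#   demand, so no manual page-token handling should be needed.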
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py new file mode 100644 index 000000000000..ae35b74ee487 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnumerateLicensedUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py new file mode 100644 index 000000000000..ca51289633c0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py new file mode 100644 index 000000000000..235e9b739fa0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py new file mode 100644 index 000000000000..453d0136af27 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Unassign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.unassign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py new file mode 100644 index 000000000000..22d3cfb4d955 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Unassign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.unassign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py new file mode 100644 index 000000000000..9f6375daf58e --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
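+# - The request below is left empty for brevity. Per the flattened signature
+#   recorded in the snippet metadata in this change, a real call typically
+#   supplies a license_pool (LicensePool) and an update_mask
+#   (google.protobuf.field_mask_pb2.FieldMask) naming the fields to change.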
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = await client.update_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py new file mode 100644 index 000000000000..8eff8c5b8d83 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = client.update_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json index 08bfac2f3eb3..e4fae2067939 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json @@ -11,6 +11,159 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", + "shortName": "ConsumerProcurementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.cancel_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "CancelOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "cancel_order" + }, + "description": "Sample for CancelOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", + "shortName": "ConsumerProcurementServiceClient" + }, + "fullName": 
"google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.cancel_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "CancelOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "cancel_order" + }, + "description": "Sample for CancelOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py" + }, { "canonical": true, "clientMethod": { @@ -341,19 +494,19 @@ "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", "shortName": "ConsumerProcurementServiceAsyncClient" }, - "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.place_order", + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.modify_order", "method": { - "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder", "service": { "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", "shortName": "ConsumerProcurementService" }, - "shortName": "PlaceOrder" + "shortName": "ModifyOrder" }, "parameters": [ { "name": "request", - "type": "google.cloud.commerce_consumer_procurement_v1.types.PlaceOrderRequest" + "type": "google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest" }, { "name": "retry", @@ -369,21 +522,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "place_order" + "shortName": "modify_order" }, - "description": "Sample for PlaceOrder", - "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py", + "description": "Sample for ModifyOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_async", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, 
"start": 27, "type": "SHORT" }, @@ -393,22 +546,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py" + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py" }, { "canonical": true, @@ -417,7 +570,84 @@ "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", "shortName": "ConsumerProcurementServiceClient" }, - "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.place_order", + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.modify_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "ModifyOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "modify_order" + }, + "description": "Sample for ModifyOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", + "shortName": "ConsumerProcurementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.place_order", "method": { "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", "service": { @@ -444,14 +674,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", + "resultType": "google.api_core.operation_async.AsyncOperation", "shortName": "place_order" }, "description": "Sample for PlaceOrder", - "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_sync", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_async", "segments": [ { "end": 56, @@ -484,7 +714,912 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py" + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", + "shortName": "ConsumerProcurementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.place_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "PlaceOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.PlaceOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "place_order" + }, + "description": "Sample for PlaceOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.assign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Assign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.AssignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.AssignResponse", + "shortName": "assign" + }, + "description": "Sample 
for Assign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.assign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Assign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.AssignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.AssignResponse", + "shortName": "assign" + }, + "description": "Sample for Assign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.enumerate_licensed_users", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "EnumerateLicensedUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest" + }, + { + 
"name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersAsyncPager", + "shortName": "enumerate_licensed_users" + }, + "description": "Sample for EnumerateLicensedUsers", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.enumerate_licensed_users", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "EnumerateLicensedUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersPager", + "shortName": "enumerate_licensed_users" + }, + "description": "Sample for EnumerateLicensedUsers", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + 
"shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.get_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.GetLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "GetLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "get_license_pool" + }, + "description": "Sample for GetLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.get_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.GetLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "GetLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "get_license_pool" + }, + "description": "Sample for GetLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.unassign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Unassign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse", + "shortName": "unassign" + }, + "description": "Sample for Unassign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.unassign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Unassign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse", + "shortName": "unassign" + }, + "description": "Sample for Unassign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.update_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.UpdateLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "UpdateLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest" + }, + { + "name": "license_pool", + "type": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "update_license_pool" + }, + "description": "Sample for UpdateLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.update_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.UpdateLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "UpdateLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest" + }, + { + 
"name": "license_pool", + "type": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "update_license_pool" + }, + "description": "Sample for UpdateLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py" } ] } diff --git a/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py b/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py index 940f6a1151c1..dea0ce76511c 100644 --- a/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py +++ b/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py @@ -39,9 +39,16 @@ def partition( class commerce_consumer_procurementCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'assign': ('parent', 'usernames', ), + 'cancel_order': ('name', 'etag', 'cancellation_policy', ), + 'enumerate_licensed_users': ('parent', 'page_size', 'page_token', ), + 'get_license_pool': ('name', ), 'get_order': ('name', ), 'list_orders': ('parent', 'page_size', 'page_token', 'filter', ), + 'modify_order': ('name', 'modifications', 'display_name', 'etag', ), 'place_order': ('parent', 'display_name', 'line_item_info', 'request_id', ), + 'unassign': ('parent', 'usernames', ), + 'update_license_pool': ('license_pool', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py index be7a8fe9c772..0bf79c077912 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py @@ -2436,6 +2436,582 @@ async def test_list_orders_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.ModifyOrderRequest, + dict, + ], +) +def 
test_modify_order(request_type, transport: str = "grpc"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = procurement_service.ModifyOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_modify_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.modify_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest() + + +def test_modify_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = procurement_service.ModifyOrderRequest( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.modify_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + +def test_modify_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.modify_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.modify_order] = mock_rpc + request = {} + client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.modify_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.modify_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest() + + +@pytest.mark.asyncio +async def test_modify_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.modify_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_order + ] = mock_rpc + + request = {} + await client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.modify_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_order_async( + transport: str = "grpc_asyncio", request_type=procurement_service.ModifyOrderRequest +): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = procurement_service.ModifyOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_modify_order_async_from_dict(): + await test_modify_order_async(request_type=dict) + + +def test_modify_order_field_headers(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.ModifyOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_modify_order_field_headers_async(): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.ModifyOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.CancelOrderRequest, + dict, + ], +) +def test_cancel_order(request_type, transport: str = "grpc"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = procurement_service.CancelOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_cancel_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest() + + +def test_cancel_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = procurement_service.CancelOrderRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.cancel_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest( + name="name_value", + etag="etag_value", + ) + + +def test_cancel_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel_order] = mock_rpc + request = {} + client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest() + + +@pytest.mark.asyncio +async def test_cancel_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_order + ] = mock_rpc + + request = {} + await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.cancel_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_order_async( + transport: str = "grpc_asyncio", request_type=procurement_service.CancelOrderRequest +): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = procurement_service.CancelOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_cancel_order_async_from_dict(): + await test_cancel_order_async(request_type=dict) + + +def test_cancel_order_field_headers(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.CancelOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_order_field_headers_async(): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.CancelOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -2443,7 +3019,567 @@ async def test_list_orders_async_pages(): dict, ], ) -def test_place_order_rest(request_type): +def test_place_order_rest(request_type): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.place_order(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_place_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.place_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.place_order] = mock_rpc + + request = {} + client.place_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.place_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_place_order_rest_required_fields( + request_type=procurement_service.PlaceOrderRequest, +): + transport_class = transports.ConsumerProcurementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["display_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).place_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["displayName"] = "display_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).place_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "displayName" in jsonified_request + assert jsonified_request["displayName"] == "display_name_value" + + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.place_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_place_order_rest_unset_required_fields(): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.place_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "displayName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_place_order_rest_interceptors(null_interceptor): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConsumerProcurementServiceRestInterceptor(), + ) + client = ConsumerProcurementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" + ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = procurement_service.PlaceOrderRequest.pb( + procurement_service.PlaceOrderRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = procurement_service.PlaceOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.place_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_place_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.PlaceOrderRequest +): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.place_order(request) + + +def test_place_order_rest_error(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.GetOrderRequest, + dict, + ], +) +def test_get_order_rest(request_type): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = order.Order( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_order(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, order.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + + +def test_get_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + + request = {} + client.get_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_order_rest_required_fields( + request_type=procurement_service.GetOrderRequest, +): + transport_class = transports.ConsumerProcurementServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = order.Order() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_order_rest_unset_required_fields(): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_order_rest_interceptors(null_interceptor): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConsumerProcurementServiceRestInterceptor(), + ) + client = ConsumerProcurementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" + ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = procurement_service.GetOrderRequest.pb( + procurement_service.GetOrderRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = order.Order.to_json(order.Order()) + + request = procurement_service.GetOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = order.Order() + + client.get_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.GetOrderRequest +): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_order(request) + + +def test_get_order_rest_flattened(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = order.Order() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "billingAccounts/sample1/orders/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=billingAccounts/*/orders/*}" % client.transport._host, args[1] + ) + + +def test_get_order_rest_flattened_error(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_order( + procurement_service.GetOrderRequest(), + name="name_value", + ) + + +def test_get_order_rest_error(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.ListOrdersRequest, + dict, + ], +) +def test_list_orders_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2456,22 +3592,27 @@ def test_place_order_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = procurement_service.ListOrdersResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.place_order(request) + response = client.list_orders(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListOrdersPager) + assert response.next_page_token == "next_page_token_value" -def test_place_order_rest_use_cached_wrapped_rpc(): +def test_list_orders_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2485,40 +3626,35 @@ def test_place_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.place_order in client._transport._wrapped_methods + assert client._transport.list_orders in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.place_order] = mock_rpc + client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc request = {} - client.place_order(request) + client.list_orders(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.place_order(request) + client.list_orders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_place_order_rest_required_fields( - request_type=procurement_service.PlaceOrderRequest, +def test_list_orders_rest_required_fields( + request_type=procurement_service.ListOrdersRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["display_name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -2529,24 +3665,29 @@ def test_place_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).place_order._get_unset_required_fields(jsonified_request) + ).list_orders._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["displayName"] = "display_name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).place_order._get_unset_required_fields(jsonified_request) + ).list_orders._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "displayName" in jsonified_request - assert jsonified_request["displayName"] == "display_name_value" client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2555,7 +3696,7 @@ def test_place_order_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = procurement_service.ListOrdersResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2567,45 +3708,48 @@ def test_place_order_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.place_order(request) + response = client.list_orders(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_place_order_rest_unset_required_fields(): +def test_list_orders_rest_unset_required_fields(): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.place_order._get_unset_required_fields({}) + unset_fields = transport.list_orders._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "displayName", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_place_order_rest_interceptors(null_interceptor): +def test_list_orders_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2618,16 +3762,14 @@ def test_place_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" + transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" + transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.PlaceOrderRequest.pb( - procurement_service.PlaceOrderRequest() + pb_message = procurement_service.ListOrdersRequest.pb( + procurement_service.ListOrdersRequest() ) transcode.return_value = { "method": "post", @@ -2639,19 +3781,19 @@ def test_place_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = procurement_service.ListOrdersResponse.to_json( + procurement_service.ListOrdersResponse() ) - request = procurement_service.PlaceOrderRequest() + request = procurement_service.ListOrdersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = 
procurement_service.ListOrdersResponse() - client.place_order( + client.list_orders( request, metadata=[ ("key", "val"), @@ -2663,8 +3805,8 @@ def test_place_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_place_order_rest_bad_request( - transport: str = "rest", request_type=procurement_service.PlaceOrderRequest +def test_list_orders_rest_bad_request( + transport: str = "rest", request_type=procurement_service.ListOrdersRequest ): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2684,23 +3826,135 @@ def test_place_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.place_order(request) + client.list_orders(request) -def test_place_order_rest_error(): +def test_list_orders_rest_flattened(): client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = procurement_service.ListOrdersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "billingAccounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_orders(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*}/orders" % client.transport._host, args[1] + ) + + +def test_list_orders_rest_flattened_error(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_orders( + procurement_service.ListOrdersRequest(), + parent="parent_value", + ) + + +def test_list_orders_rest_pager(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + order.Order(), + order.Order(), + ], + next_page_token="abc", + ), + procurement_service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + ], + next_page_token="ghi", + ), + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + order.Order(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + procurement_service.ListOrdersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "billingAccounts/sample1"} + + pager = client.list_orders(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, order.Order) for i in results) + + pages = list(client.list_orders(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - procurement_service.GetOrderRequest, + procurement_service.ModifyOrderRequest, dict, ], ) -def test_get_order_rest(request_type): +def test_modify_order_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2713,31 +3967,22 @@ def test_get_order_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = order.Order( - name="name_value", - display_name="display_name_value", - etag="etag_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.modify_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, order.Order) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" + assert response.operation.name == "operations/spam" -def test_get_order_rest_use_cached_wrapped_rpc(): +def test_modify_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2751,30 +3996,34 @@ def test_get_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_order in client._transport._wrapped_methods + assert client._transport.modify_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + client._transport._wrapped_methods[client._transport.modify_order] = mock_rpc request = {} - client.get_order(request) + client.modify_order(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_order(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.modify_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_order_rest_required_fields( - request_type=procurement_service.GetOrderRequest, +def test_modify_order_rest_required_fields( + request_type=procurement_service.ModifyOrderRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport @@ -2790,7 +4039,7 @@ def test_get_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).modify_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2799,7 +4048,7 @@ def test_get_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).modify_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2813,7 +4062,7 @@ def test_get_order_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = order.Order() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2825,39 +4074,37 @@ def test_get_order_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.modify_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_order_rest_unset_required_fields(): +def test_modify_order_rest_unset_required_fields(): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_order._get_unset_required_fields({}) + unset_fields = transport.modify_order._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_order_rest_interceptors(null_interceptor): +def test_modify_order_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2870,14 +4117,16 @@ def test_get_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_modify_order" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" + transports.ConsumerProcurementServiceRestInterceptor, "pre_modify_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.GetOrderRequest.pb( - procurement_service.GetOrderRequest() + pb_message = procurement_service.ModifyOrderRequest.pb( + procurement_service.ModifyOrderRequest() ) transcode.return_value = { "method": "post", @@ -2889,108 +4138,55 @@ def test_get_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = order.Order.to_json(order.Order()) - - request = procurement_service.GetOrderRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = order.Order() - - client.get_order( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_order_rest_bad_request( - transport: str = "rest", request_type=procurement_service.GetOrderRequest -): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "billingAccounts/sample1/orders/sample2"} - request = request_type(**request_init) - - 
# Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_order(request) - - -def test_get_order_rest_flattened(): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = order.Order() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "billingAccounts/sample1/orders/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.get_order(**mock_args) + request = procurement_service.ModifyOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=billingAccounts/*/orders/*}" % client.transport._host, args[1] + client.modify_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_get_order_rest_flattened_error(transport: str = "rest"): + +def test_modify_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.ModifyOrderRequest +): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_order( - procurement_service.GetOrderRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.modify_order(request) -def test_get_order_rest_error(): +def test_modify_order_rest_error(): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2999,44 +4195,39 @@ def test_get_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - procurement_service.ListOrdersRequest, + procurement_service.CancelOrderRequest, dict, ], ) -def test_list_orders_rest(request_type): +def test_cancel_order_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "billingAccounts/sample1"} + request_init = {"name": "billingAccounts/sample1/orders/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = procurement_service.ListOrdersResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.cancel_order(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListOrdersPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_orders_rest_use_cached_wrapped_rpc(): +def test_cancel_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3050,35 +4241,39 @@ def test_list_orders_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_orders in client._transport._wrapped_methods + assert client._transport.cancel_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_order] = mock_rpc request = {} - client.list_orders(request) + client.cancel_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_orders(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_orders_rest_required_fields( - request_type=procurement_service.ListOrdersRequest, +def test_cancel_order_rest_required_fields( + request_type=procurement_service.CancelOrderRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3089,29 +4284,21 @@ def test_list_orders_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) + ).cancel_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).cancel_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3120,7 +4307,7 @@ def test_list_orders_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = procurement_service.ListOrdersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3132,48 +4319,37 @@ def test_list_orders_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.cancel_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_orders_rest_unset_required_fields(): +def test_cancel_order_rest_unset_required_fields(): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_orders._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.cancel_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_orders_rest_interceptors(null_interceptor): +def test_cancel_order_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3186,14 +4362,16 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_cancel_order" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" + transports.ConsumerProcurementServiceRestInterceptor, "pre_cancel_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.ListOrdersRequest.pb( - procurement_service.ListOrdersRequest() + pb_message = procurement_service.CancelOrderRequest.pb( + procurement_service.CancelOrderRequest() ) transcode.return_value = { "method": "post", @@ -3205,19 +4383,19 @@ def test_list_orders_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = procurement_service.ListOrdersResponse.to_json( - procurement_service.ListOrdersResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = procurement_service.ListOrdersRequest() + request = procurement_service.CancelOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = procurement_service.ListOrdersResponse() + post.return_value = operations_pb2.Operation() - client.list_orders( + client.cancel_order( request, metadata=[ ("key", "val"), @@ -3229,8 +4407,8 @@ def 
test_list_orders_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_orders_rest_bad_request( - transport: str = "rest", request_type=procurement_service.ListOrdersRequest +def test_cancel_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.CancelOrderRequest ): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3238,7 +4416,7 @@ def test_list_orders_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "billingAccounts/sample1"} + request_init = {"name": "billingAccounts/sample1/orders/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3250,126 +4428,14 @@ def test_list_orders_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_orders(request) - - -def test_list_orders_rest_flattened(): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = procurement_service.ListOrdersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "billingAccounts/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_orders(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=billingAccounts/*}/orders" % client.transport._host, args[1] - ) - - -def test_list_orders_rest_flattened_error(transport: str = "rest"): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_orders( - procurement_service.ListOrdersRequest(), - parent="parent_value", - ) + client.cancel_order(request) -def test_list_orders_rest_pager(transport: str = "rest"): +def test_cancel_order_rest_error(): client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - order.Order(), - order.Order(), - ], - next_page_token="abc", - ), - procurement_service.ListOrdersResponse( - orders=[], - next_page_token="def", - ), - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - ], - next_page_token="ghi", - ), - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - order.Order(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - procurement_service.ListOrdersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "billingAccounts/sample1"} - - pager = client.list_orders(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, order.Order) for i in results) - - pages = list(client.list_orders(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. @@ -3513,6 +4579,8 @@ def test_consumer_procurement_service_base_transport(): "place_order", "get_order", "list_orders", + "modify_order", + "cancel_order", "get_operation", ) for method in methods: @@ -3809,6 +4877,12 @@ def test_consumer_procurement_service_client_transport_session_collision( session1 = client1.transport.list_orders._session session2 = client2.transport.list_orders._session assert session1 != session2 + session1 = client1.transport.modify_order._session + session2 = client2.transport.modify_order._session + assert session1 != session2 + session1 = client1.transport.cancel_order._session + session2 = client2.transport.cancel_order._session + assert session1 != session2 def test_consumer_procurement_service_grpc_transport_channel(): diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py new file mode 100644 index 000000000000..aab21d579a3a --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py @@ -0,0 +1,6057 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
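The new LicenseManagementService test module that begins here follows the same generated pattern as the ConsumerProcurementService tests above: REST methods are exercised by patching requests' Session.request and returning a canned Response whose _content carries the JSON-serialized message. A minimal, self-contained sketch of that mocking pattern follows; fetch_resource is a hypothetical stand-in, not a generated client method.

import json
from unittest import mock

from requests import Response
from requests.sessions import Session


def fetch_resource(session, name):
    # Hypothetical stand-in for a generated REST call: issue the request
    # through the (patched) session and decode the JSON body.
    resp = session.request("GET", "https://example.googleapis.com/v1/" + name)
    return json.loads(resp.content)


with mock.patch.object(Session, "request") as req:
    # Wrap the fake payload in a real Response object, exactly as the
    # generated tests do, and hand it back from the patched session.
    response_value = Response()
    response_value.status_code = 200
    response_value._content = json.dumps({"name": "licenses/sample1"}).encode("UTF-8")
    req.return_value = response_value
    assert fetch_resource(Session(), "licenses/sample1")["name"] == "licenses/sample1"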
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + LicenseManagementServiceAsyncClient, + LicenseManagementServiceClient, + pagers, + transports, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LicenseManagementServiceClient._get_default_mtls_endpoint(None) is None + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + LicenseManagementServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + LicenseManagementServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert LicenseManagementServiceClient._get_client_cert_source(None, False) is None + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + 
== mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LicenseManagementServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + default_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + LicenseManagementServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + LicenseManagementServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LicenseManagementServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LicenseManagementServiceClient._get_universe_domain(None, None) + == LicenseManagementServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + LicenseManagementServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
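test__get_universe_domain above asserts a simple precedence: an explicit client-supplied universe domain wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value, then the library default, and an empty string is rejected. A self-contained sketch of that precedence, using a hypothetical resolve_universe_domain helper rather than the private client method:

def resolve_universe_domain(client_value, env_value, default="googleapis.com"):
    # Explicit client configuration first, then the environment, then the default.
    if client_value == "":
        raise ValueError("Universe Domain cannot be an empty string.")
    return client_value or env_value or default


assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_universe_domain(None, None) == "googleapis.com"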
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LicenseManagementServiceClient, "grpc"), + (LicenseManagementServiceAsyncClient, "grpc_asyncio"), + (LicenseManagementServiceClient, "rest"), + ], +) +def test_license_management_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.LicenseManagementServiceGrpcTransport, "grpc"), + (transports.LicenseManagementServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LicenseManagementServiceRestTransport, "rest"), + ], +) +def test_license_management_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LicenseManagementServiceClient, "grpc"), + (LicenseManagementServiceAsyncClient, "grpc_asyncio"), + (LicenseManagementServiceClient, "rest"), + ], +) +def test_license_management_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com/" + ) + + +def test_license_management_service_client_get_transport_class(): + transport = LicenseManagementServiceClient.get_transport_class() + available_transports = [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceRestTransport, + ] + assert transport in available_transports + + transport = 
LicenseManagementServiceClient.get_transport_class("grpc") + assert transport == transports.LicenseManagementServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + LicenseManagementServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + LicenseManagementServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + "true", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + "false", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + "true", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_license_management_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [LicenseManagementServiceClient, LicenseManagementServiceAsyncClient], +) +@mock.patch.object( + LicenseManagementServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [LicenseManagementServiceClient, LicenseManagementServiceAsyncClient], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + default_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
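The "never"/"always"/"auto" cases pinned down here reduce to one small decision rule. A minimal sketch of that rule, assuming a hypothetical resolve_api_endpoint helper (the real logic lives inside the generated client, which additionally raises MutualTLSChannelError or ValueError for unsupported env-var values):

    import os

    def resolve_api_endpoint(default_endpoint, mtls_endpoint, cert_available):
        # Hypothetical helper, not the generated client's code; it mirrors the
        # endpoint choice the surrounding tests assert for each env-var combination.
        use_mtls = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        use_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
        if use_mtls == "always":
            return mtls_endpoint
        if use_mtls == "never":
            return default_endpoint
        # "auto": use mTLS only when certificates are both allowed and available.
        return mtls_endpoint if (use_cert and cert_available) else default_endpoint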
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +def test_license_management_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + None, + ), + ], +) +def test_license_management_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_license_management_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = LicenseManagementServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_license_management_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudcommerceconsumerprocurement.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudcommerceconsumerprocurement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.GetLicensePoolRequest, + dict, + ], +) +def test_get_license_pool(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + response = client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.GetLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_get_license_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest() + + +def test_get_license_pool_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.GetLicensePoolRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_license_pool(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest( + name="name_value", + ) + + +def test_get_license_pool_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_license_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_license_pool + ] = mock_rpc + request = {} + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_license_pool_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. 
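The *_use_cached_wrapped_rpc tests above only check that the wrapper produced by wrap_method is built once per transport and then reused from _wrapped_methods. A toy sketch of that memoization pattern, with illustrative names rather than the generated transport's internals:

    class _ToyTransport:
        def __init__(self, wrap_method):
            self._wrap_method = wrap_method        # e.g. gapic_v1.method.wrap_method
            self._wrapped_methods = {}

        def wrapped(self, rpc):
            if rpc not in self._wrapped_methods:   # first use: build and cache
                self._wrapped_methods[rpc] = self._wrap_method(rpc)
            return self._wrapped_methods[rpc]      # later uses: reuse, no re-wrap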
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.get_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest() + + +@pytest.mark.asyncio +async def test_get_license_pool_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_license_pool + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_license_pool + ] = mock_rpc + + request = {} + await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_license_pool_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.GetLicensePoolRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.GetLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +@pytest.mark.asyncio +async def test_get_license_pool_async_from_dict(): + await test_get_license_pool_async(request_type=dict) + + +def test_get_license_pool_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
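The field-header checks that follow verify that the resource name from the request is echoed into the x-goog-request-params gRPC metadata entry, which the service uses for routing. A standalone sketch of how such an entry is formed, assuming a made-up routing_metadata helper (the generated client uses gapic_v1 routing-header utilities instead):

    from urllib.parse import quote

    def routing_metadata(**params):
        # Builds ("x-goog-request-params", "k1=v1&k2=v2"),
        # e.g. routing_metadata(name="name_value") -> ("x-goog-request-params", "name=name_value").
        value = "&".join(f"{key}={quote(str(val), safe='')}" for key, val in params.items())
        return ("x-goog-request-params", value)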
+ request = license_management_service.GetLicensePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value = license_management_service.LicensePool() + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_license_pool_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.GetLicensePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_license_pool_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_license_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_license_pool_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_license_pool_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. 
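The flattened/flattened_error pairs above rest on one rule: a method call may supply either a full request object or individual flattened keyword fields, never both. A schematic of that guard, purely illustrative (the generated client implements it inside each method):

    def build_get_request(request=None, *, name=None):
        # Request object and flattened fields are mutually exclusive.
        if request is not None and name is not None:
            raise ValueError(
                "If the `request` argument is set, then none of the individual "
                "field arguments should be set."
            )
        if request is None:
            request = {"name": name}  # stands in for GetLicensePoolRequest(name=name)
        return request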
+ call.return_value = license_management_service.LicensePool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_license_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_license_pool_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UpdateLicensePoolRequest, + dict, + ], +) +def test_update_license_pool(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + response = client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.UpdateLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_update_license_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +def test_update_license_pool_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.UpdateLicensePoolRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_license_pool(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +def test_update_license_pool_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_license_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_license_pool + ] = mock_rpc + request = {} + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_license_pool_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.update_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +@pytest.mark.asyncio +async def test_update_license_pool_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_license_pool + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_license_pool + ] = mock_rpc + + request = {} + await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_license_pool_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.UpdateLicensePoolRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.UpdateLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +@pytest.mark.asyncio +async def test_update_license_pool_async_from_dict(): + await test_update_license_pool_async(request_type=dict) + + +def test_update_license_pool_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = license_management_service.UpdateLicensePoolRequest() + + request.license_pool.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value = license_management_service.LicensePool() + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "license_pool.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_license_pool_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UpdateLicensePoolRequest() + + request.license_pool.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "license_pool.name=name_value", + ) in kw["metadata"] + + +def test_update_license_pool_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_license_pool( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].license_pool + mock_val = license_management_service.LicensePool(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_license_pool_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
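The update tests above pass both the resource and a FieldMask listing the paths to change; the mask is what makes the update partial. A minimal runnable illustration, with a placeholder path rather than a real LicensePool field:

    from google.protobuf import field_mask_pb2

    # Only the fields named in `paths` would be written by the Update call.
    update_mask = field_mask_pb2.FieldMask(paths=["example_field"])
    assert list(update_mask.paths) == ["example_field"]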
+ with pytest.raises(ValueError): + client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_license_pool_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_license_pool( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].license_pool + mock_val = license_management_service.LicensePool(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_license_pool_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.AssignRequest, + dict, + ], +) +def test_assign(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.AssignResponse() + response = client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.AssignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +def test_assign_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.assign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest() + + +def test_assign_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.AssignRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.assign(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest( + parent="parent_value", + ) + + +def test_assign_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.assign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.assign] = mock_rpc + request = {} + client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_assign_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + response = await client.assign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest() + + +@pytest.mark.asyncio +async def test_assign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.assign + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.assign + ] = mock_rpc + + request = {} + await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_assign_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.AssignRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + response = await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.AssignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +@pytest.mark.asyncio +async def test_assign_async_from_dict(): + await test_assign_async(request_type=dict) + + +def test_assign_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.AssignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value = license_management_service.AssignResponse() + client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_assign_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.AssignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_assign_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.AssignResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.assign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +def test_assign_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.asyncio +async def test_assign_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.AssignResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.assign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_assign_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UnassignRequest, + dict, + ], +) +def test_unassign(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + response = client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.UnassignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +def test_unassign_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.unassign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest() + + +def test_unassign_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.UnassignRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.unassign(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest( + parent="parent_value", + ) + + +def test_unassign_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unassign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.unassign] = mock_rpc + request = {} + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_unassign_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + response = await client.unassign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest() + + +@pytest.mark.asyncio +async def test_unassign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.unassign + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.unassign + ] = mock_rpc + + request = {} + await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_unassign_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.UnassignRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + response = await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.UnassignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +@pytest.mark.asyncio +async def test_unassign_async_from_dict(): + await test_unassign_async(request_type=dict) + + +def test_unassign_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UnassignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value = license_management_service.UnassignResponse() + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_unassign_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UnassignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_unassign_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.unassign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +def test_unassign_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.asyncio +async def test_unassign_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.unassign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_unassign_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.EnumerateLicensedUsersRequest, + dict, + ], +) +def test_enumerate_licensed_users(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + response = client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.EnumerateLicensedUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.EnumerateLicensedUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_enumerate_licensed_users_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.enumerate_licensed_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest() + + +def test_enumerate_licensed_users_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.EnumerateLicensedUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.enumerate_licensed_users(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_enumerate_licensed_users_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enumerate_licensed_users + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enumerate_licensed_users + ] = mock_rpc + request = {} + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.enumerate_licensed_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest() + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.enumerate_licensed_users + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.enumerate_licensed_users + ] = mock_rpc + + request = {} + await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.EnumerateLicensedUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.EnumerateLicensedUsersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_from_dict(): + await test_enumerate_licensed_users_async(request_type=dict) + + +def test_enumerate_licensed_users_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.EnumerateLicensedUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value = license_management_service.EnumerateLicensedUsersResponse() + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.EnumerateLicensedUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse() + ) + await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_enumerate_licensed_users_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.EnumerateLicensedUsersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.enumerate_licensed_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_enumerate_licensed_users_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.EnumerateLicensedUsersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.enumerate_licensed_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +def test_enumerate_licensed_users_pager(transport_name: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.enumerate_licensed_users( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in results + ) + + +def test_enumerate_licensed_users_pages(transport_name: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + pages = list(client.enumerate_licensed_users(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_pager(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.enumerate_licensed_users), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + async_pager = await client.enumerate_licensed_users( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in responses + ) + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_pages(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.enumerate_licensed_users(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.GetLicensePoolRequest, + dict, + ], +) +def test_get_license_pool_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_license_pool(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_get_license_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_license_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_license_pool + ] = mock_rpc + + request = {} + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_license_pool_rest_required_fields( + request_type=license_management_service.GetLicensePoolRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_license_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_license_pool_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_license_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_license_pool_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_get_license_pool" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_get_license_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.GetLicensePoolRequest.pb( + license_management_service.GetLicensePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.LicensePool.to_json( + license_management_service.LicensePool() + ) + + request = license_management_service.GetLicensePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.LicensePool() + + client.get_license_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_license_pool_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.GetLicensePoolRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_license_pool(request) + + +def test_get_license_pool_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_license_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=billingAccounts/*/orders/*/licensePool}" + % client.transport._host, + args[1], + ) + + +def test_get_license_pool_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +def test_get_license_pool_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UpdateLicensePoolRequest, + dict, + ], +) +def test_update_license_pool_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + request_init["license_pool"] = { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3", + "license_assignment_protocol": { + "manual_assignment_type": {}, + "auto_assignment_type": { + "inactive_license_ttl": {"seconds": 751, "nanos": 543} + }, + }, + "available_license_count": 2411, + "total_license_count": 2030, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = license_management_service.UpdateLicensePoolRequest.meta.fields[ + "license_pool" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["license_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["license_pool"][field])): + del request_init["license_pool"][field][i][subfield] + else: + del request_init["license_pool"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_license_pool(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_update_license_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_license_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_license_pool + ] = mock_rpc + + request = {} + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_license_pool_rest_required_fields( + request_type=license_management_service.UpdateLicensePoolRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_license_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_license_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_license_pool_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_license_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "licensePool", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_license_pool_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_update_license_pool" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_update_license_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.UpdateLicensePoolRequest.pb( + license_management_service.UpdateLicensePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.LicensePool.to_json( + license_management_service.LicensePool() + ) + + request = license_management_service.UpdateLicensePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.LicensePool() + + client.update_license_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_license_pool_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.UpdateLicensePoolRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_license_pool(request) + + +def test_update_license_pool_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_license_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{license_pool.name=billingAccounts/*/orders/*/licensePool/*}" + % client.transport._host, + args[1], + ) + + +def test_update_license_pool_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_license_pool_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.AssignRequest, + dict, + ], +) +def test_assign_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.AssignResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.assign(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +def test_assign_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.assign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.assign] = mock_rpc + + request = {} + client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_assign_rest_required_fields( + request_type=license_management_service.AssignRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["usernames"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).assign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["usernames"] = "usernames_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).assign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "usernames" in jsonified_request + assert jsonified_request["usernames"] == "usernames_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.AssignResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.assign(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_assign_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.assign._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "usernames", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_assign_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_assign" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_assign" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.AssignRequest.pb( + license_management_service.AssignRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.AssignResponse.to_json( + license_management_service.AssignResponse() + ) + + request = license_management_service.AssignRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.AssignResponse() + + client.assign( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_assign_rest_bad_request( + transport: str = "rest", request_type=license_management_service.AssignRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.assign(request) + + +def test_assign_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.AssignResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + usernames=["usernames_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.assign(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:assign" + % client.transport._host, + args[1], + ) + + +def test_assign_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +def test_assign_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UnassignRequest, + dict, + ], +) +def test_unassign_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.UnassignResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.unassign(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +def test_unassign_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unassign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.unassign] = mock_rpc + + request = {} + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_unassign_rest_required_fields( + request_type=license_management_service.UnassignRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["usernames"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unassign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["usernames"] = "usernames_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unassign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "usernames" in jsonified_request + assert jsonified_request["usernames"] == "usernames_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.UnassignResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.unassign(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_unassign_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.unassign._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "usernames", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_unassign_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_unassign" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_unassign" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.UnassignRequest.pb( + license_management_service.UnassignRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.UnassignResponse.to_json( + license_management_service.UnassignResponse() + ) + + request = license_management_service.UnassignRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.UnassignResponse() + + client.unassign( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_unassign_rest_bad_request( + transport: str = "rest", request_type=license_management_service.UnassignRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.unassign(request) + + +def test_unassign_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.UnassignResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + usernames=["usernames_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.unassign(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:unassign" + % client.transport._host, + args[1], + ) + + +def test_unassign_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +def test_unassign_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.EnumerateLicensedUsersRequest, + dict, + ], +) +def test_enumerate_licensed_users_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enumerate_licensed_users(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.EnumerateLicensedUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_enumerate_licensed_users_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enumerate_licensed_users + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enumerate_licensed_users + ] = mock_rpc + + request = {} + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_enumerate_licensed_users_rest_required_fields( + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enumerate_licensed_users._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enumerate_licensed_users._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.EnumerateLicensedUsersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enumerate_licensed_users(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enumerate_licensed_users_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enumerate_licensed_users._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enumerate_licensed_users_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "post_enumerate_licensed_users", + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "pre_enumerate_licensed_users", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.EnumerateLicensedUsersRequest.pb( + license_management_service.EnumerateLicensedUsersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + 
license_management_service.EnumerateLicensedUsersResponse.to_json( + license_management_service.EnumerateLicensedUsersResponse() + ) + ) + + request = license_management_service.EnumerateLicensedUsersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.EnumerateLicensedUsersResponse() + + client.enumerate_licensed_users( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enumerate_licensed_users_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enumerate_licensed_users(request) + + +def test_enumerate_licensed_users_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.EnumerateLicensedUsersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enumerate_licensed_users(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:enumerateLicensedUsers" + % client.transport._host, + args[1], + ) + + +def test_enumerate_licensed_users_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +def test_enumerate_licensed_users_rest_pager(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + license_management_service.EnumerateLicensedUsersResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + pager = client.enumerate_licensed_users(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in results + ) + + pages = list(client.enumerate_licensed_users(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LicenseManagementServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LicenseManagementServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + transports.LicenseManagementServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LicenseManagementServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LicenseManagementServiceGrpcTransport, + ) + + +def test_license_management_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LicenseManagementServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_license_management_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LicenseManagementServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_license_pool", + "update_license_pool", + "assign", + "unassign", + "enumerate_licensed_users", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_license_management_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseManagementServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_license_management_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseManagementServiceTransport() + adc.assert_called_once() + + +def test_license_management_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LicenseManagementServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + transports.LicenseManagementServiceRestTransport, + ], +) +def test_license_management_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com/" + api_audience_tests = [None, "https://language2.com/"] + api_audience_expect = [host, "https://language2.com/"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LicenseManagementServiceGrpcTransport, grpc_helpers), + (transports.LicenseManagementServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_license_management_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudcommerceconsumerprocurement.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudcommerceconsumerprocurement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_license_management_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LicenseManagementServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_license_management_service_host_no_port(transport_name): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudcommerceconsumerprocurement.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_license_management_service_host_with_port(transport_name): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudcommerceconsumerprocurement.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com:8000/" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_license_management_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LicenseManagementServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LicenseManagementServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_license_pool._session + session2 = client2.transport.get_license_pool._session + assert session1 != session2 + session1 = client1.transport.update_license_pool._session + session2 = client2.transport.update_license_pool._session + assert session1 != session2 + session1 = client1.transport.assign._session + session2 = client2.transport.assign._session + assert session1 != session2 + session1 = client1.transport.unassign._session + session2 = client2.transport.unassign._session + assert session1 != session2 + session1 = client1.transport.enumerate_licensed_users._session + session2 = client2.transport.enumerate_licensed_users._session + assert session1 != session2 + + +def test_license_management_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.LicenseManagementServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_license_management_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.LicenseManagementServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_license_pool_path(): + billing_account = "squid" + order = "clam" + expected = "billingAccounts/{billing_account}/orders/{order}/licensePool".format( + billing_account=billing_account, + order=order, + ) + actual = LicenseManagementServiceClient.license_pool_path(billing_account, order) + assert expected == actual + + +def test_parse_license_pool_path(): + expected = { + "billing_account": "whelk", + "order": "octopus", + } + path = LicenseManagementServiceClient.license_pool_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_license_pool_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = LicenseManagementServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LicenseManagementServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = LicenseManagementServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LicenseManagementServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicenseManagementServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = LicenseManagementServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LicenseManagementServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = LicenseManagementServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LicenseManagementServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = LicenseManagementServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LicenseManagementServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.LicenseManagementServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LicenseManagementServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = LicenseManagementServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "billingAccounts/sample1/orders/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "billingAccounts/sample1/orders/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From bd7ac5328808f9aadfad08404348bc1cc473ff08 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 7 Oct 2024 17:13:23 +0200 Subject: [PATCH 102/108] chore(deps): update all dependencies (#13130) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | Type | Update | |---|---|---|---|---|---|---|---| | [google-cloud-kms](https://redirect.github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms) ([source](https://redirect.github.com/googleapis/google-cloud-python)) | `>= 2.3.0, <3.0.0dev` -> `>=3.0.0, <3.1.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/google-cloud-kms/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/google-cloud-kms/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/google-cloud-kms/2.24.2/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/google-cloud-kms/2.24.2/3.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | major | | [ubuntu](https://redirect.github.com/actions/runner-images) | `22.04` -> 
`24.04` | [![age](https://developer.mend.io/api/mc/badges/age/github-runners/ubuntu/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/github-runners/ubuntu/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/github-runners/ubuntu/22.04/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/github-runners/ubuntu/22.04/24.04?slim=true)](https://docs.renovatebot.com/merge-confidence/) | github-runner | major | --- ### Release Notes
googleapis/google-cloud-python (google-cloud-kms) ### [`v3.0.0`](https://redirect.github.com/googleapis/google-cloud-python/releases/tag/google-cloud-kms-v3.0.0): google-cloud-kms: v3.0.0 [Compare Source](https://redirect.github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v2.24.2...google-cloud-kms-v3.0.0) ##### ⚠ BREAKING CHANGES - Pagination feature is introduced for method ListKeyHandles in service Autokey ##### Features - Adding a state field for AutokeyConfig ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ##### Bug Fixes - Pagination feature is introduced for method ListKeyHandles in service Autokey ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) ##### Documentation - A comment for field destroy_scheduled_duration in message .google.cloud.kms.v1.CryptoKey is updated for the default duration ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4)) - Field service_resolvers in message .google.cloud.kms.v1.EkmConnection is explicitly marked as having field behavior of Optional ([b4c9770](https://redirect.github.com/googleapis/google-cloud-python/commit/b4c977059e075c73781c179b26fdf915548e65c4))
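
A minimal sketch (not part of this patch) of how the newly paginated `ListKeyHandles` surface is typically consumed after this breaking change. The `AutokeyClient` and `list_key_handles` names follow the release note above; the exact call signature, the parent path, and the printed field are assumptions for illustration.

```python
# Hypothetical usage sketch for the ListKeyHandles pagination change in
# google-cloud-kms 3.x; the parent path and field access below are
# illustrative only, not taken from this PR.
from google.cloud import kms_v1

client = kms_v1.AutokeyClient()

# The method is assumed to return a pager that fetches subsequent pages
# lazily, so callers iterate over results rather than reading a single
# response message.
for key_handle in client.list_key_handles(
    parent="projects/my-project/locations/us-central1"
):
    print(key_handle.name)
```

Code that previously treated the return value as a single `ListKeyHandlesResponse` would need to iterate the results (or walk `pages`) instead, which is presumably why the change is listed as breaking.
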
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://redirect.github.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/googleapis/google-cloud-python). BEGIN_COMMIT_OVERRIDE fix(deps): allow google-cloud-kms 3.x END_COMMIT_OVERRIDE --------- Co-authored-by: ohmayr Co-authored-by: Anthonios Partheniou --- .github/workflows/main.yml | 2 +- packages/google-cloud-kms-inventory/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6a77a8b8e8e3..678e2c6a3724 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,7 @@ jobs: permissions: pull-requests: write # for googleapis/code-suggester name: Update API List PR - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # don't run the workflow on forks of googleapis/google-cloud-python if: ${{github.repository == 'googleapis/google-cloud-python'}} steps: diff --git a/packages/google-cloud-kms-inventory/setup.py b/packages/google-cloud-kms-inventory/setup.py index fe5e2c1e73d4..6fd9503942b2 100644 --- a/packages/google-cloud-kms-inventory/setup.py +++ b/packages/google-cloud-kms-inventory/setup.py @@ -47,7 +47,7 @@ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-cloud-kms >= 2.3.0, <3.0.0dev", + "google-cloud-kms >= 2.3.0, <4.0.0", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory" From 27c262d51c5d9f055152d9448f5fb6759da4bdb3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:15:31 -0400 Subject: [PATCH 103/108] feat: [google-cloud-backupdr] Client library for the backupvault api is added (#13110) - [ ] Regenerate this pull request now. 
fix!: Remove visibility of unneeded InitiateBackup RPC fix!: Remove visibility of unneeded AbandonBackup RPC fix!: Remove visibility of unneeded FinalizeBackup RPC fix!: Remove visibility of unneeded RemoveDataSource RPC fix!: Remove visibility of unneeded SetInternalStatus RPC PiperOrigin-RevId: 683196317 Source-Link: https://github.com/googleapis/googleapis/commit/c532f355b2bae18fdff19ced316897433de5f093 Source-Link: https://github.com/googleapis/googleapis-gen/commit/fd43f5733b002358fa679623f6099aafb6660c8e Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhY2t1cGRyLy5Pd2xCb3QueWFtbCIsImgiOiJmZDQzZjU3MzNiMDAyMzU4ZmE2Nzk2MjNmNjA5OWFhZmI2NjYwYzhlIn0= BEGIN_NESTED_COMMIT feat: [google-cloud-backupdr] Client library for the backupvault api is added feat: Add backupplan proto feat: Add backupplanassociation proto feat: Add backupvault_ba proto feat: Add backupvault_gce proto docs: A comment for field `oauth2_client_id` in message `.google.cloud.backupdr.v1.ManagementServer` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.ListManagementServersRequest` is changed docs: A comment for field `management_servers` in message `.google.cloud.backupdr.v1.ListManagementServersResponse` is changed docs: A comment for field `name` in message `.google.cloud.backupdr.v1.GetManagementServerRequest` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.CreateManagementServerRequest` is changed docs: A comment for field `requested_cancellation` in message `.google.cloud.backupdr.v1.OperationMetadata` is changed PiperOrigin-RevId: 678800741 Source-Link: https://github.com/googleapis/googleapis/commit/d36e288fc56ace0443c96ee1e385529c4ec4198c Source-Link: https://github.com/googleapis/googleapis-gen/commit/fba9d225b43dcf7361006491810485cdf1b57cdb Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWJhY2t1cGRyLy5Pd2xCb3QueWFtbCIsImgiOiJmYmE5ZDIyNWI0M2RjZjczNjEwMDY0OTE4MTA0ODVjZGYxYjU3Y2RiIn0= END_NESTED_COMMIT BEGIN_COMMIT_OVERRIDE feat: [google-cloud-backupdr] Client library for the backupvault api is added feat: Add backupplan proto feat: Add backupplanassociation proto feat: Add backupvault_ba proto feat: Add backupvault_gce proto docs: A comment for field `oauth2_client_id` in message `.google.cloud.backupdr.v1.ManagementServer` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.ListManagementServersRequest` is changed docs: A comment for field `management_servers` in message `.google.cloud.backupdr.v1.ListManagementServersResponse` is changed docs: A comment for field `name` in message `.google.cloud.backupdr.v1.GetManagementServerRequest` is changed docs: A comment for field `parent` in message `.google.cloud.backupdr.v1.CreateManagementServerRequest` is changed docs: A comment for field `requested_cancellation` in message `.google.cloud.backupdr.v1.OperationMetadata` is changed END_COMMIT_OVERRIDE --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr --- .../google/cloud/backupdr/__init__.py | 166 + .../google/cloud/backupdr_v1/__init__.py | 164 + .../cloud/backupdr_v1/gapic_metadata.json | 345 + .../services/backup_dr/async_client.py | 2964 ++- .../backupdr_v1/services/backup_dr/client.py | 3011 ++- .../backupdr_v1/services/backup_dr/pagers.py | 925 +- .../services/backup_dr/transports/base.py | 430 +- .../services/backup_dr/transports/grpc.py | 638 +- .../backup_dr/transports/grpc_asyncio.py | 862 +- .../services/backup_dr/transports/rest.py | 3645 ++- .../cloud/backupdr_v1/types/__init__.py | 164 + 
.../cloud/backupdr_v1/types/backupdr.py | 37 +- .../cloud/backupdr_v1/types/backupplan.py | 644 + .../types/backupplanassociation.py | 454 + .../cloud/backupdr_v1/types/backupvault.py | 2065 ++ .../cloud/backupdr_v1/types/backupvault_ba.py | 87 + .../backupdr_v1/types/backupvault_gce.py | 1991 ++ ...dr_create_backup_plan_association_async.py | 62 + ..._dr_create_backup_plan_association_sync.py | 62 + ...ated_backup_dr_create_backup_plan_async.py | 68 + ...rated_backup_dr_create_backup_plan_sync.py | 68 + ...ted_backup_dr_create_backup_vault_async.py | 57 + ...ated_backup_dr_create_backup_vault_sync.py | 57 + ...generated_backup_dr_delete_backup_async.py | 56 + ...dr_delete_backup_plan_association_async.py | 56 + ..._dr_delete_backup_plan_association_sync.py | 56 + ...ated_backup_dr_delete_backup_plan_async.py | 56 + ...rated_backup_dr_delete_backup_plan_sync.py | 56 + ..._generated_backup_dr_delete_backup_sync.py | 56 + ...ted_backup_dr_delete_backup_vault_async.py | 56 + ...ated_backup_dr_delete_backup_vault_sync.py | 56 + ...kup_dr_fetch_usable_backup_vaults_async.py | 53 + ...ckup_dr_fetch_usable_backup_vaults_sync.py | 53 + ...v1_generated_backup_dr_get_backup_async.py | 52 + ...up_dr_get_backup_plan_association_async.py | 52 + ...kup_dr_get_backup_plan_association_sync.py | 52 + ...nerated_backup_dr_get_backup_plan_async.py | 52 + ...enerated_backup_dr_get_backup_plan_sync.py | 52 + ..._v1_generated_backup_dr_get_backup_sync.py | 52 + ...erated_backup_dr_get_backup_vault_async.py | 52 + ...nerated_backup_dr_get_backup_vault_sync.py | 52 + ...nerated_backup_dr_get_data_source_async.py | 52 + ...enerated_backup_dr_get_data_source_sync.py | 52 + ..._dr_list_backup_plan_associations_async.py | 53 + ...p_dr_list_backup_plan_associations_sync.py | 53 + ...rated_backup_dr_list_backup_plans_async.py | 53 + ...erated_backup_dr_list_backup_plans_sync.py | 53 + ...ated_backup_dr_list_backup_vaults_async.py | 53 + ...rated_backup_dr_list_backup_vaults_sync.py | 53 + ..._generated_backup_dr_list_backups_async.py | 53 + ...1_generated_backup_dr_list_backups_sync.py | 53 + ...rated_backup_dr_list_data_sources_async.py | 53 + ...erated_backup_dr_list_data_sources_sync.py | 53 + ...enerated_backup_dr_restore_backup_async.py | 61 + ...generated_backup_dr_restore_backup_sync.py | 61 + ...enerated_backup_dr_trigger_backup_async.py | 57 + ...generated_backup_dr_trigger_backup_sync.py | 57 + ...generated_backup_dr_update_backup_async.py | 55 + ..._generated_backup_dr_update_backup_sync.py | 55 + ...ted_backup_dr_update_backup_vault_async.py | 55 + ...ated_backup_dr_update_backup_vault_sync.py | 55 + ...ated_backup_dr_update_data_source_async.py | 55 + ...rated_backup_dr_update_data_source_sync.py | 55 + ...pet_metadata_google.cloud.backupdr.v1.json | 4061 +++- .../scripts/fixup_backupdr_v1_keywords.py | 23 + .../unit/gapic/backupdr_v1/test_backup_dr.py | 20048 +++++++++++++++- 66 files changed, 44283 insertions(+), 985 deletions(-) create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py create mode 100644 packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py create mode 100644 
packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py create 
mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py create mode 100644 packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index c29b14037f34..5ab4e805a40b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -33,6 +33,94 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from google.cloud.backupdr_v1.types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from google.cloud.backupdr_v1.types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + 
GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from google.cloud.backupdr_v1.types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from google.cloud.backupdr_v1.types.backupvault_ba import ( + BackupApplianceBackupProperties, +) +from google.cloud.backupdr_v1.types.backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRClient", @@ -48,4 +136,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + 
"ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index a2dc2b97f601..eddcfa53658f 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -32,19 +32,183 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from .types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .types.backupvault_ba import BackupApplianceBackupProperties +from .types.backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRAsyncClient", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceBackupProperties", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupConfigState", "BackupDRClient", + "BackupLock", + "BackupPlan", + "BackupPlanAssociation", + "BackupRule", + "BackupVault", + "BackupVaultView", + "BackupView", + "BackupWindow", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CreateBackupPlanAssociationRequest", + "CreateBackupPlanRequest", + 
"CreateBackupVaultRequest", "CreateManagementServerRequest", + "CustomerEncryptionKey", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupPlanAssociationRequest", + "DeleteBackupPlanRequest", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", "DeleteManagementServerRequest", + "DisplayDevice", + "Entry", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupPlanAssociationRequest", + "GetBackupPlanRequest", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", "GetManagementServerRequest", + "GuestOsFeature", + "InstanceParams", + "KeyRevocationActionType", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListBackupsRequest", + "ListBackupsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", "ListManagementServersRequest", "ListManagementServersResponse", "ManagementServer", "ManagementURI", + "Metadata", "NetworkConfig", + "NetworkInterface", + "NetworkPerformanceConfig", "OperationMetadata", + "RestoreBackupRequest", + "RestoreBackupResponse", + "RuleConfigInfo", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "ServiceLockInfo", + "StandardSchedule", + "Tags", + "TargetResource", + "TriggerBackupRequest", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "WeekDayOfMonth", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 4a9d58bb8dad..902530688c39 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -10,75 +10,420 @@ "grpc": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + 
"ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "grpc-async": { "libraryClient": "BackupDRAsyncClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "rest": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + 
"methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } } diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 33ea84a57365..ae020d8602f6 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -48,12 +48,21 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) from .client import BackupDRClient from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -72,6 +81,20 @@ class BackupDRAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = BackupDRClient._DEFAULT_UNIVERSE + backup_path = staticmethod(BackupDRClient.backup_path) + parse_backup_path = staticmethod(BackupDRClient.parse_backup_path) + backup_plan_path = staticmethod(BackupDRClient.backup_plan_path) + parse_backup_plan_path = staticmethod(BackupDRClient.parse_backup_plan_path) + backup_plan_association_path = staticmethod( + BackupDRClient.backup_plan_association_path + ) + parse_backup_plan_association_path = staticmethod( + BackupDRClient.parse_backup_plan_association_path + ) + backup_vault_path = staticmethod(BackupDRClient.backup_vault_path) + parse_backup_vault_path = staticmethod(BackupDRClient.parse_backup_vault_path) + data_source_path = staticmethod(BackupDRClient.data_source_path) + parse_data_source_path = staticmethod(BackupDRClient.parse_data_source_path) management_server_path = staticmethod(BackupDRClient.management_server_path) parse_management_server_path = staticmethod( BackupDRClient.parse_management_server_path @@ -304,10 +327,10 @@ async def sample_list_management_servers(): parent (:class:`str`): Required. 
The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve management servers + for all locations, use "-" for the '{location}' value. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -428,7 +451,7 @@ async def sample_get_management_server(): name (:class:`str`): Required. Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -542,10 +565,9 @@ async def sample_create_management_server(): management server instance. parent (:class:`str`): Required. The management server project and location in - the format - ``projects/{project_id}/locations/{location}``. In Cloud - Backup and DR locations map to GCP regions, for example - **us-central1**. + the format 'projects/{project_id}/locations/{location}'. + In Cloud Backup and DR locations map to Google Cloud + regions, for example **us-central1**. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -766,6 +788,2928 @@ async def sample_delete_management_server(): # Done; return the response. return response + async def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]]): + The request object. Message for creating a BackupVault. + parent (:class:`str`): + Required. Value for parent. 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (:class:`str`): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsAsyncPager: + r"""Lists BackupVaults in a given project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]]): + The request object. Request message for listing + backupvault stores. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager: + Response message for listing + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsAsyncPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]]): + The request object. Request message for fetching usable + BackupVaults. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchUsableBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup_vault( + self, + request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Gets details of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]]): + The request object. Request message for getting a + BackupVault. + name (:class:`str`): + Required. Name of the backupvault store resource name, + in the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupVault: + Message describing a BackupVault + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupVaultRequest): + request = backupvault.GetBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_vault( + self, + request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None, + *, + backup_vault: Optional[backupvault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]]): + The request object. Request message for updating a + BackupVault. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being updated + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupVaultRequest): + request = backupvault.UpdateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backup_vault( + self, + request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]]): + The request object. Message for deleting a BackupVault. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupVaultRequest): + request = backupvault.DeleteBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
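+        # Illustrative sketch (assumes an existing BackupDRAsyncClient named
+        # ``client``; the resource path is a placeholder): the flattened ``name``
+        # argument can be passed instead of a DeleteBackupVaultRequest.
+        #
+        #   operation = await client.delete_backup_vault(
+        #       name="projects/{project}/locations/{location}/backupVaults/{backupvault}",
+        #   )
+        #   await operation.result()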
+ return response + + async def list_data_sources( + self, + request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesAsyncPager: + r"""Lists DataSources in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]]): + The request object. Request message for listing + DataSources. + parent (:class:`str`): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager: + Response message for listing + DataSources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListDataSourcesRequest): + request = backupvault.ListDataSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_sources + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
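+        # Illustrative sketch (assumes ``client`` is a BackupDRAsyncClient; the
+        # parent path is a placeholder): the flattened ``parent`` argument can be
+        # passed instead of a ListDataSourcesRequest, and the returned pager
+        # resolves additional pages automatically.
+        #
+        #   pager = await client.list_data_sources(
+        #       parent="projects/{project}/locations/{location}",
+        #   )
+        #   async for data_source in pager:
+        #       print(data_source.name)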
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataSourcesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_source( + self, + request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Gets details of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]]): + The request object. Request message for getting a + DataSource instance. + name (:class:`str`): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetDataSourceRequest): + request = backupvault.GetDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
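+        # Illustrative sketch (assumes ``client`` is a BackupDRAsyncClient; the
+        # resource path is a placeholder): the flattened ``name`` argument can be
+        # passed instead of a GetDataSourceRequest.
+        #
+        #   data_source = await client.get_data_source(
+        #       name="projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}",
+        #   )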
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_data_source( + self, + request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[backupvault.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]]): + The request object. Request message for updating a data + source instance. + data_source (:class:`google.cloud.backupdr_v1.types.DataSource`): + Required. The resource being updated + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object. 
+ Datasource object used to represent Datasource + details for both admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateDataSourceRequest): + request = backupvault.UpdateDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.DataSource, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backups( + self, + request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]]): + The request object. Request message for listing Backups. + parent (:class:`str`): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. 
To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager: + Response message for listing Backups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupsRequest): + request = backupvault.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Gets details of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]]): + The request object. Request message for getting a Backup. + name (:class:`str`): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.Backup: + Message describing a Backup object. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupRequest): + request = backupvault.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup( + self, + request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[backupvault.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]]): + The request object. Request message for updating a + Backup. + backup (:class:`google.cloud.backupdr_v1.types.Backup`): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupRequest): + request = backupvault.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
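+        # Illustrative sketch (assumes ``client`` is a BackupDRAsyncClient and
+        # ``backup`` is a backupdr_v1.Backup carrying the new values; the field
+        # path in the mask is a placeholder): the flattened ``backup`` and
+        # ``update_mask`` arguments can be passed instead of an
+        # UpdateBackupRequest.
+        #
+        #   from google.protobuf import field_mask_pb2
+        #
+        #   operation = await client.update_backup(
+        #       backup=backup,
+        #       update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+        #   )
+        #   updated = await operation.result()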
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backup( + self, + request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]]): + The request object. Message for deleting a Backup. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupRequest): + request = backupvault.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def restore_backup( + self, + request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restore from a Backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]]): + The request object. Request message for restoring from a + Backup. + name (:class:`str`): + Required. The resource name of the Backup instance, in + the format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.restore_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]]): + The request object. The request message for creating a ``BackupPlan``. + parent (:class:`str`): + Required. 
The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (:class:`google.cloud.backupdr_v1.types.BackupPlan`): + Required. The ``BackupPlan`` resource object to create. + This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_id (:class:`str`): + Required. The name of the ``BackupPlan`` to create. The + name must be unique for the specified project and + location.The name must start with a lowercase letter + followed by up to 62 lowercase letters, numbers, or + hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. + + This corresponds to the ``backup_plan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_plan, backup_plan_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
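+        # Illustrative sketch (assumes ``client`` is a BackupDRAsyncClient and
+        # ``backup_plan`` is a backupdr_v1.BackupPlan built as in the sample
+        # above; the other values are placeholders): the flattened ``parent``,
+        # ``backup_plan`` and ``backup_plan_id`` arguments can be passed instead
+        # of a CreateBackupPlanRequest.
+        #
+        #   operation = await client.create_backup_plan(
+        #       parent="projects/{project}/locations/{location}",
+        #       backup_plan=backup_plan,
+        #       backup_plan_id="my-backup-plan",
+        #   )
+        #   response = await operation.result()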
+ return response + + async def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]]): + The request object. The request message for getting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansAsyncPager: + r"""Lists BackupPlans in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]]): + The request object. The request message for getting a list ``BackupPlan``. + parent (:class:`str`): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all + locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager: + The response message for getting a list of BackupPlan. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
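+        # Illustrative sketch (assumes ``client`` is a BackupDRAsyncClient; the
+        # project id is a placeholder): per the docstring above, "-" can be used
+        # as the location to list backup plans across all locations.
+        #
+        #   pager = await client.list_backup_plans(
+        #       parent="projects/{project}/locations/-",
+        #   )
+        #   async for backup_plan in pager:
+        #       print(backup_plan.name)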
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plans + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlansAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]]): + The request object. The request message for deleting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]]): + The request object. Request message for creating a backup + plan. + parent (:class:`str`): + Required. 
The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (:class:`str`): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. + + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
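+        # Illustrative sketch (assumes ``client`` is a BackupDRAsyncClient; the
+        # resource names are placeholders): the flattened arguments can be passed
+        # instead of a CreateBackupPlanAssociationRequest.
+        #
+        #   association = backupdr_v1.BackupPlanAssociation(
+        #       resource="projects/{project}/zones/{zone}/instances/{instance}",
+        #       backup_plan="projects/{project}/locations/{location}/backupPlans/{backup_plan}",
+        #   )
+        #   operation = await client.create_backup_plan_association(
+        #       parent="projects/{project}/locations/{location}",
+        #       backup_plan_association=association,
+        #       backup_plan_association_id="my-association",
+        #   )
+        #   response = await operation.result()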
+ return response + + async def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsAsyncPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]]): + The request object. Request message for List + BackupPlanAssociation + parent (:class:`str`): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlanAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]]): + The request object. Request message for deleting a backup + plan association. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]]): + The request object. Request message for triggering a + backup. + name (:class:`str`): + Required. 
Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (:class:`str`): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.trigger_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+        return response
+
     async def list_operations(
         self,
         request: Optional[operations_pb2.ListOperationsRequest] = None,
diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py
index a853cfead99d..b884fc527a39 100644
--- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py
+++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py
@@ -54,12 +54,21 @@
 from google.iam.v1 import iam_policy_pb2  # type: ignore
 from google.iam.v1 import policy_pb2  # type: ignore
 from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
 from google.protobuf import timestamp_pb2  # type: ignore
 from google.protobuf import wrappers_pb2  # type: ignore
 
 from google.cloud.backupdr_v1.services.backup_dr import pagers
-from google.cloud.backupdr_v1.types import backupdr
+from google.cloud.backupdr_v1.types import (
+    backupdr,
+    backupplan,
+    backupplanassociation,
+    backupvault,
+    backupvault_ba,
+    backupvault_gce,
+)
 
 from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport
 from .transports.grpc import BackupDRGrpcTransport
@@ -191,6 +200,126 @@ def transport(self) -> BackupDRTransport:
         """
         return self._transport
 
+    @staticmethod
+    def backup_path(
+        project: str,
+        location: str,
+        backupvault: str,
+        datasource: str,
+        backup: str,
+    ) -> str:
+        """Returns a fully-qualified backup string."""
+        return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format(
+            project=project,
+            location=location,
+            backupvault=backupvault,
+            datasource=datasource,
+            backup=backup,
+        )
+
+    @staticmethod
+    def parse_backup_path(path: str) -> Dict[str, str]:
+        """Parses a backup path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)/dataSources/(?P<datasource>.+?)/backups/(?P<backup>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def backup_plan_path(
+        project: str,
+        location: str,
+        backup_plan: str,
+    ) -> str:
+        """Returns a fully-qualified backup_plan string."""
+        return (
+            "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format(
+                project=project,
+                location=location,
+                backup_plan=backup_plan,
+            )
+        )
+
+    @staticmethod
+    def parse_backup_plan_path(path: str) -> Dict[str, str]:
+        """Parses a backup_plan path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupPlans/(?P<backup_plan>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def backup_plan_association_path(
+        project: str,
+        location: str,
+        backup_plan_association: str,
+    ) -> str:
+        """Returns a fully-qualified backup_plan_association string."""
+        return "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format(
+            project=project,
+            location=location,
+            backup_plan_association=backup_plan_association,
+        )
+
+    @staticmethod
+    def parse_backup_plan_association_path(path: str) -> Dict[str, str]:
+        """Parses a backup_plan_association path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupPlanAssociations/(?P<backup_plan_association>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def backup_vault_path(
+        project: str,
+        location: str,
+        backupvault: str,
+    ) -> str:
+        """Returns a fully-qualified backup_vault string."""
+        return (
+            "projects/{project}/locations/{location}/backupVaults/{backupvault}".format(
+                project=project,
+                location=location,
+                backupvault=backupvault,
+            )
+        )
+
+    @staticmethod
+    def parse_backup_vault_path(path: str) -> Dict[str, str]:
+        """Parses a backup_vault path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def data_source_path(
+        project: str,
+        location: str,
+        backupvault: str,
+        datasource: str,
+    ) -> str:
+        """Returns a fully-qualified data_source string."""
+        return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format(
+            project=project,
+            location=location,
+            backupvault=backupvault,
+            datasource=datasource,
+        )
+
+    @staticmethod
+    def parse_data_source_path(path: str) -> Dict[str, str]:
+        """Parses a data_source path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)/dataSources/(?P<datasource>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def management_server_path(
+        project: str,
@@ -728,10 +857,10 @@ def sample_list_management_servers():
             parent (str):
                 Required. The project and location for which to retrieve
                 management servers information, in the format
-                ``projects/{project_id}/locations/{location}``. In Cloud
-                BackupDR, locations map to GCP regions, for example
-                **us-central1**. To retrieve management servers for all
-                locations, use "-" for the ``{location}`` value.
+                'projects/{project_id}/locations/{location}'. In Cloud
+                BackupDR, locations map to Google Cloud regions, for
+                example **us-central1**. To retrieve management servers
+                for all locations, use "-" for the '{location}' value.
 
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -849,7 +978,7 @@ def sample_get_management_server():
             name (str):
                 Required. Name of the management server resource name,
                 in the format
-                ``projects/{project_id}/locations/{location}/managementServers/{resource_name}``
+                'projects/{project_id}/locations/{location}/managementServers/{resource_name}'
 
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -960,10 +1089,9 @@ def sample_create_management_server():
                 management server instance.
             parent (str):
                 Required. The management server project and location in
-                the format
-                ``projects/{project_id}/locations/{location}``. In Cloud
-                Backup and DR locations map to GCP regions, for example
-                **us-central1**.
+                the format 'projects/{project_id}/locations/{location}'.
+                In Cloud Backup and DR locations map to Google Cloud
+                regions, for example **us-central1**.
 
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -1178,6 +1306,2869 @@ def sample_delete_management_server():
         # Done; return the response.
return response + def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]): + The request object. Message for creating a BackupVault. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsPager: + r"""Lists BackupVaults in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]): + The request object. Request message for listing + backupvault stores. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager: + Response message for listing + BackupVaults. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_vaults] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupVaultsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]): + The request object. Request message for fetching usable + BackupVaults. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. 
In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchUsableBackupVaultsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup_vault( + self, + request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Gets details of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]): + The request object. Request message for getting a + BackupVault. + name (str): + Required. Name of the backupvault store resource name, + in the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupVault: + Message describing a BackupVault + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupVaultRequest): + request = backupvault.GetBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup_vault( + self, + request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None, + *, + backup_vault: Optional[backupvault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]): + The request object. Request message for updating a + BackupVault. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being updated + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupVaultRequest): + request = backupvault.UpdateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup_vault( + self, + request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]): + The request object. Message for deleting a BackupVault. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupVaultRequest): + request = backupvault.DeleteBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_sources( + self, + request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesPager: + r"""Lists DataSources in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]): + The request object. Request message for listing + DataSources. + parent (str): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager: + Response message for listing + DataSources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListDataSourcesRequest): + request = backupvault.ListDataSourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataSourcesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_source( + self, + request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Gets details of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]): + The request object. Request message for getting a + DataSource instance. + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetDataSourceRequest): + request = backupvault.GetDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_data_source( + self, + request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[backupvault.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]): + The request object. Request message for updating a data + source instance. + data_source (google.cloud.backupdr_v1.types.DataSource): + Required. The resource being updated + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object. + Datasource object used to represent Datasource + details for both admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateDataSourceRequest): + request = backupvault.UpdateDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.DataSource, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backups( + self, + request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]): + The request object. 
Request message for listing Backups. + parent (str): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager: + Response message for listing Backups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupsRequest): + request = backupvault.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup( + self, + request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Gets details of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]): + The request object. Request message for getting a Backup. + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.Backup: + Message describing a Backup object. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupRequest): + request = backupvault.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup( + self, + request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[backupvault.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]): + The request object. Request message for updating a + Backup. + backup (google.cloud.backupdr_v1.types.Backup): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupRequest): + request = backupvault.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
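+ # `operation.from_gapic` returns a `google.api_core.operation.Operation`
+ # future; calling `.result()` on it polls via the transport's operations
+ # client and yields the `backupvault.Backup` result type declared below.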
+ response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup( + self, + request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]): + The request object. Message for deleting a Backup. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupRequest): + request = backupvault.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
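+ # Explicit `retry`/`timeout` arguments override the defaults carried by the
+ # wrapped method; `gapic_v1.method.DEFAULT` means "use the configured default".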
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def restore_backup( + self, + request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restore from a Backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]): + The request object. Request message for restoring from a + Backup. + name (str): + Required. The resource name of the Backup instance, in + the format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]): + The request object. The request message for creating a ``BackupPlan``. + parent (str): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The ``BackupPlan`` resource object to create. 
+ This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_id (str): + Required. The name of the ``BackupPlan`` to create. The + name must be unique for the specified project and + location.The name must start with a lowercase letter + followed by up to 62 lowercase letters, numbers, or + hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. + + This corresponds to the ``backup_plan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_plan, backup_plan_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]): + The request object. The request message for getting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansPager: + r"""Lists BackupPlans in a given project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]): + The request object. The request message for getting a list ``BackupPlan``. + parent (str): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all + locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager: + The response message for getting a list of BackupPlan. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_plans] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
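+ # Iterating the pager transparently issues follow-up ``ListBackupPlans``
+ # requests via `next_page_token`, reusing the same retry, timeout and
+ # metadata settings captured here.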
+ response = pagers.ListBackupPlansPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]): + The request object. The request message for deleting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]): + The request object. Request message for creating a backup + plan. + parent (str): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. 
+ + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]): + The request object. Request message for List + BackupPlanAssociation + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
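+ # The pager keeps only the most recent response; callers that need whole
+ # pages (rather than flattened results) can iterate its ``pages`` property.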
+ response = pagers.ListBackupPlanAssociationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]): + The request object. Request message for deleting a backup + plan association. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
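+ # A plain dict is also accepted for ``request``; the proto-plus constructor
+ # below coerces it into a ``DeleteBackupPlanAssociationRequest``.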
+ if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]): + The request object. Request message for triggering a + backup. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.trigger_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "BackupDRClient": return self diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py index 462c609c80a8..3594ed629a56 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py @@ -38,7 +38,12 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) class ListManagementServersPager: @@ -191,3 +196,921 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListBackupVaultsResponse], + request: backupvault.ListBackupVaultsRequest, + response: backupvault.ListBackupVaultsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsAsyncPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupVaultsResponse]], + request: backupvault.ListBackupVaultsRequest, + response: backupvault.ListBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsPager: + """A pager for iterating through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.FetchUsableBackupVaultsResponse], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsAsyncPager: + """A pager for iterating through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.FetchUsableBackupVaultsResponse]], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListDataSourcesResponse], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.DataSource]: + for page in self.pages: + yield from page.data_sources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesAsyncPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListDataSourcesResponse]], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.DataSource]: + async def async_generator(): + async for page in self.pages: + for response in page.data_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListBackupsResponse], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupsResponse]], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plans`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
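+
+    Example (an illustrative sketch; the ``parent`` value is a placeholder and
+    the pager is normally obtained from ``BackupDRClient.list_backup_plans``
+    rather than constructed directly):
+
+    .. code-block:: python
+
+        from google.cloud import backupdr_v1
+
+        client = backupdr_v1.BackupDRClient()
+        request = backupdr_v1.ListBackupPlansRequest(
+            parent="projects/my-project/locations/us-central1",
+        )
+        pager = client.list_backup_plans(request=request)
+
+        # Iterating the pager yields BackupPlan messages and transparently
+        # fetches additional pages; ``pager.pages`` yields the raw
+        # ListBackupPlansResponse objects instead.
+        for plan in pager:
+            print(plan.name)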
+ """ + + def __init__( + self, + method: Callable[..., backupplan.ListBackupPlansResponse], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplan.BackupPlan]: + for page in self.pages: + yield from page.backup_plans + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansAsyncPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plans`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupplan.ListBackupPlansResponse]], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplan.BackupPlan]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plans: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plan_associations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplanassociation.ListBackupPlanAssociationsResponse], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplanassociation.BackupPlanAssociation]: + for page in self.pages: + yield from page.backup_plan_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsAsyncPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plan_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse] + ], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplanassociation.BackupPlanAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plan_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py index 0b741d07dfc8..e11f0ea29379 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py @@ -29,7 +29,12 @@ from google.oauth2 import service_account # type: ignore from google.cloud.backupdr_v1 import gapic_version as package_version -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -170,6 +175,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_backup_vault: gapic_v1.method.wrap_method( + self.create_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method.wrap_method( + self.list_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method.wrap_method( + self.get_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method.wrap_method( + self.delete_backup_vault, + 
default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method.wrap_method( + self.list_data_sources, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method.wrap_method( + self.get_data_source, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan: gapic_v1.method.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -225,6 +426,233 @@ def delete_management_server( ]: raise NotImplementedError() + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], + 
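+        # Each Union return type below covers both transport flavors: the
+        # synchronous transports resolve to the response itself, while the
+        # asyncio transport resolves to an Awaitable of the same type.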
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Union[ + backupvault.ListBackupVaultsResponse, + Awaitable[backupvault.ListBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Union[ + backupvault.FetchUsableBackupVaultsResponse, + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], + Union[backupvault.BackupVault, Awaitable[backupvault.BackupVault]], + ]: + raise NotImplementedError() + + @property + def update_backup_vault( + self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Union[ + backupvault.ListDataSourcesResponse, + Awaitable[backupvault.ListDataSourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], + Union[backupvault.DataSource, Awaitable[backupvault.DataSource]], + ]: + raise NotImplementedError() + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], + Union[ + backupvault.ListBackupsResponse, Awaitable[backupvault.ListBackupsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [backupvault.GetBackupRequest], + Union[backupvault.Backup, Awaitable[backupvault.Backup]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRequest], + Union[backupplan.BackupPlan, Awaitable[backupplan.BackupPlan]], + ]: + raise NotImplementedError() + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Union[ + backupplan.ListBackupPlansResponse, + Awaitable[backupplan.ListBackupPlansResponse], 
+ ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Union[ + backupplanassociation.BackupPlanAssociation, + Awaitable[backupplanassociation.BackupPlanAssociation], + ], + ]: + raise NotImplementedError() + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Union[ + backupplanassociation.ListBackupPlanAssociationsResponse, + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py index 39a429c884c0..63c287d5bfa1 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py @@ -26,7 +26,12 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -361,6 +366,637 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
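+        # The stub is created lazily on first property access and memoized in
+        # self._stubs under the method name, so later calls reuse a single
+        # unary-unary callable bound to the fully qualified RPC path.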
+ if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + ~.ListBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + ~.FetchUsableBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + ~.BackupVault]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + ~.ListDataSourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + ~.DataSource]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. 
+ + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + r"""Return a callable for the get backup plan method over gRPC. 
+ + Gets details of a single BackupPlan. + + Returns: + Callable[[~.GetBackupPlanRequest], + ~.BackupPlan]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + ~.ListBackupPlansResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
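# A minimal usage sketch (illustrative, not generated code): the
# Operation-returning stubs above surface to callers as long-running-operation
# futures on the public client. Assumes the google-cloud-backupdr package
# exports these names; the resource path is a placeholder.
from google.cloud import backupdr_v1

client = backupdr_v1.BackupDRClient()
request = backupdr_v1.DeleteBackupVaultRequest(
    name="projects/my-project/locations/us-central1/backupVaults/my-vault",
)
# The client wraps the DeleteBackupVault stub defined above and returns an
# operation future rather than a raw operations_pb2.Operation.
operation = client.delete_backup_vault(request=request)
operation.result(timeout=300)  # blocks until the long-running operation completes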
+ if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + ~.BackupPlanAssociation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. + + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + ~.ListBackupPlanAssociationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
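# A caching sketch (illustrative, not generated code): each property above
# builds its gRPC callable once and stores it in self._stubs, so repeated
# access returns the same object. Assumes the public BackupDRClient with its
# default gRPC transport; calling a stub directly would bypass the client's
# retry, timeout, and routing-header handling.
from google.cloud import backupdr_v1

client = backupdr_v1.BackupDRClient()
stub = client.transport.get_backup_vault
# The second property access hits the self._stubs cache and yields the same callable.
assert stub is client.transport.get_backup_vault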
+ if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py index 26b64ba6a60c..9acd2b61c3fb 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py @@ -28,7 +28,12 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport from .grpc import BackupDRGrpcTransport @@ -374,6 +379,665 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Awaitable[backupvault.ListBackupVaultsResponse], + ]: + r"""Return a callable for the list backup vaults method over gRPC. 
+ + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + Awaitable[~.ListBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + Awaitable[~.FetchUsableBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], Awaitable[backupvault.BackupVault] + ]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + Awaitable[~.BackupVault]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Awaitable[backupvault.ListDataSourcesResponse], + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + Awaitable[~.ListDataSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], Awaitable[backupvault.DataSource] + ]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
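# An asyncio usage sketch (illustrative, not generated code): the awaitable
# stubs above are normally reached through BackupDRAsyncClient, which pages
# list responses. Assumes the package exports these names; project and
# location values are placeholders.
import asyncio

from google.cloud import backupdr_v1


async def list_vaults() -> None:
    client = backupdr_v1.BackupDRAsyncClient()
    request = backupdr_v1.ListBackupVaultsRequest(
        parent="projects/my-project/locations/us-central1",
    )
    # The async pager drives the ListBackupVaults stub defined above.
    async for vault in await client.list_backup_vaults(request=request):
        print(vault.name)


asyncio.run(list_vaults())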
+ if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], Awaitable[backupvault.ListBackupsResponse] + ]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], Awaitable[backupvault.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], Awaitable[backupplan.BackupPlan]]: + r"""Return a callable for the get backup plan method over gRPC. + + Gets details of a single BackupPlan. 
+ + Returns: + Callable[[~.GetBackupPlanRequest], + Awaitable[~.BackupPlan]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Awaitable[backupplan.ListBackupPlansResponse], + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + Awaitable[~.ListBackupPlansResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
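# A serialization side note (illustrative, not generated code): the stubs above
# pair a proto-plus serializer with a raw protobuf deserializer. Request types
# expose static serialize()/deserialize() helpers, while the long-running
# Operation is a plain protobuf message, hence FromString. The name value is a
# placeholder.
from google.cloud.backupdr_v1.types import backupplanassociation
from google.longrunning import operations_pb2

req = backupplanassociation.GetBackupPlanAssociationRequest(name="placeholder")
wire = backupplanassociation.GetBackupPlanAssociationRequest.serialize(req)  # bytes sent on the channel
roundtrip = backupplanassociation.GetBackupPlanAssociationRequest.deserialize(wire)
assert roundtrip.name == req.name
# LRO responses come back as serialized google.longrunning Operation messages.
default_op = operations_pb2.Operation.FromString(b"")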
+ if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Awaitable[backupplanassociation.BackupPlanAssociation], + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + Awaitable[~.BackupPlanAssociation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. + + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + Awaitable[~.ListBackupPlanAssociationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
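# A low-level asyncio sketch (illustrative, not generated code): each async
# property above returns a callable whose result is awaited. Calling the
# transport directly, as below, bypasses the retry and timeout defaults that
# _prep_wrapped_messages applies further down; the transport class name
# BackupDRGrpcAsyncIOTransport is assumed from GAPIC naming conventions.
from google.cloud.backupdr_v1.types import backupplanassociation
from google.longrunning import operations_pb2


async def trigger_backup_raw(
    transport,  # assumed: a BackupDRGrpcAsyncIOTransport instance
    request: backupplanassociation.TriggerBackupRequest,
) -> operations_pb2.Operation:
    # trigger_backup resolves to the cached grpc.aio unary-unary callable
    # registered above; awaiting the call yields the raw Operation message.
    return await transport.trigger_backup(request)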
+ if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -415,6 +1079,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_backup_vault: gapic_v1.method_async.wrap_method( + self.create_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method_async.wrap_method( + self.list_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method_async.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method_async.wrap_method( + self.get_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method_async.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method_async.wrap_method( + self.delete_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method_async.wrap_method( + 
self.list_data_sources, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method_async.wrap_method( + self.get_data_source, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method_async.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method_async.wrap_method( + self.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method_async.wrap_method( + self.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method_async.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method_async.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method_async.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method_async.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method_async.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method_async.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan: gapic_v1.method_async.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method_async.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method_async.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method_async.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method_async.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method_async.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index 621153ce0574..2ff0d1ccf3b9 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ 
b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -47,7 +47,12 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import BackupDRTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -74,6 +79,30 @@ class BackupDRRestInterceptor: .. code-block:: python class MyCustomBackupDRInterceptor(BackupDRRestInterceptor): + def pre_create_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -82,6 +111,38 @@ def post_create_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +151,54 @@ def post_delete_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_fetch_usable_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_usable_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_get_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_source(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -98,6 +207,46 @@ def post_get_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_list_backup_plan_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plan_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_plans(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plans(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_sources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_sources(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_management_servers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -106,28 +255,68 @@ def post_list_management_servers(self, response): logging.log(f"Received response: {response}") return response + def pre_restore_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_trigger_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_trigger_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_source(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_source(self, response): + logging.log(f"Received response: {response}") + return response + transport = BackupDRRestTransport(interceptor=MyCustomBackupDRInterceptor()) client = BackupDRClient(transport=transport) """ - def pre_create_management_server( + def pre_create_backup_plan( self, - request: backupdr.CreateManagementServerRequest, + request: backupplan.CreateBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_management_server + ) -> Tuple[backupplan.CreateBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_create_management_server( + def post_create_backup_plan( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_management_server + """Post-rpc interceptor for create_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -135,22 +324,25 @@ def post_create_management_server( """ return response - def pre_delete_management_server( + def pre_create_backup_plan_association( self, - request: backupdr.DeleteManagementServerRequest, + request: backupplanassociation.CreateBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_management_server + ) -> Tuple[ + backupplanassociation.CreateBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_delete_management_server( + def post_create_backup_plan_association( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_management_server + """Post-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -158,22 +350,22 @@ def post_delete_management_server( """ return response - def pre_get_management_server( + def pre_create_backup_vault( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.CreateBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_management_server + ) -> Tuple[backupvault.CreateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_management_server( - self, response: backupdr.ManagementServer - ) -> backupdr.ManagementServer: - """Post-rpc interceptor for get_management_server + def post_create_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -181,22 +373,22 @@ def post_get_management_server( """ return response - def pre_list_management_servers( + def pre_create_management_server( self, - request: backupdr.ListManagementServersRequest, + request: backupdr.CreateManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_management_servers + ) -> Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_list_management_servers( - self, response: backupdr.ListManagementServersResponse - ) -> backupdr.ListManagementServersResponse: - """Post-rpc interceptor for list_management_servers + def post_create_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -204,22 +396,22 @@ def post_list_management_servers( """ return response - def pre_get_location( + def pre_delete_backup( self, - request: locations_pb2.GetLocationRequest, + request: backupvault.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location + ) -> Tuple[backupvault.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -227,22 +419,22 @@ def post_get_location( """ return response - def pre_list_locations( + def pre_delete_backup_plan( self, - request: locations_pb2.ListLocationsRequest, + request: backupplan.DeleteBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations + ) -> Tuple[backupplan.DeleteBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations + def post_delete_backup_plan( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -250,20 +442,25 @@ def post_list_locations( """ return response - def pre_get_iam_policy( + def pre_delete_backup_plan_association( self, - request: iam_policy_pb2.GetIamPolicyRequest, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy + ) -> Tuple[ + backupplanassociation.DeleteBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy + def post_delete_backup_plan_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -271,20 +468,22 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_set_iam_policy( + def pre_delete_backup_vault( self, - request: iam_policy_pb2.SetIamPolicyRequest, + request: backupvault.DeleteBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy + ) -> Tuple[backupvault.DeleteBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy + def post_delete_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -292,22 +491,22 @@ def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_test_iam_permissions( + def pre_delete_management_server( self, - request: iam_policy_pb2.TestIamPermissionsRequest, + request: backupdr.DeleteManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions + ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions + def post_delete_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -315,20 +514,22 @@ def post_test_iam_permissions( """ return response - def pre_cancel_operation( + def pre_fetch_usable_backup_vaults( self, - request: operations_pb2.CancelOperationRequest, + request: backupvault.FetchUsableBackupVaultsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation + ) -> Tuple[backupvault.FetchUsableBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation + def post_fetch_usable_backup_vaults( + self, response: backupvault.FetchUsableBackupVaultsResponse + ) -> backupvault.FetchUsableBackupVaultsResponse: + """Post-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -336,20 +537,18 @@ def post_cancel_operation(self, response: None) -> None: """ return response - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation + def pre_get_backup( + self, request: backupvault.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[backupvault.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation + def post_get_backup(self, response: backupvault.Backup) -> backupvault.Backup: + """Post-rpc interceptor for get_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -357,22 +556,22 @@ def post_delete_operation(self, response: None) -> None: """ return response - def pre_get_operation( + def pre_get_backup_plan( self, - request: operations_pb2.GetOperationRequest, + request: backupplan.GetBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation + ) -> Tuple[backupplan.GetBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation + def post_get_backup_plan( + self, response: backupplan.BackupPlan + ) -> backupplan.BackupPlan: + """Post-rpc interceptor for get_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -380,22 +579,24 @@ def post_get_operation( """ return response - def pre_list_operations( + def pre_get_backup_plan_association( self, - request: operations_pb2.ListOperationsRequest, + request: backupplanassociation.GetBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations + ) -> Tuple[ + backupplanassociation.GetBackupPlanAssociationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations + def post_get_backup_plan_association( + self, response: backupplanassociation.BackupPlanAssociation + ) -> backupplanassociation.BackupPlanAssociation: + """Post-rpc interceptor for get_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -403,164 +604,2784 @@ def post_list_operations( """ return response + def pre_get_backup_vault( + self, + request: backupvault.GetBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_vault -@dataclasses.dataclass -class BackupDRRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BackupDRRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + def post_get_backup_vault( + self, response: backupvault.BackupVault + ) -> backupvault.BackupVault: + """Post-rpc interceptor for get_backup_vault -class BackupDRRestTransport(BackupDRTransport): - """REST backend transport for BackupDR. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response - The BackupDR Service + def pre_get_data_source( + self, + request: backupvault.GetDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_source - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - It sends JSON representations of protocol buffers over HTTP/1.1 + def post_get_data_source( + self, response: backupvault.DataSource + ) -> backupvault.DataSource: + """Post-rpc interceptor for get_data_source - """ + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response - def __init__( + def pre_get_management_server( self, - *, - host: str = "backupdr.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[BackupDRRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + request: backupdr.GetManagementServerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_management_server - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + def post_get_management_server( + self, response: backupdr.ManagementServer + ) -> backupdr.ManagementServer: + """Post-rpc interceptor for get_management_server + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + return response - url_match_items = maybe_url_match.groupdict() + def pre_list_backup_plan_associations( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + backupplanassociation.ListBackupPlanAssociationsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_backup_plan_associations - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BackupDRRestInterceptor() - self._prep_wrapped_messages(client_info) + def post_list_backup_plan_associations( + self, response: backupplanassociation.ListBackupPlanAssociationsResponse + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + """Post-rpc interceptor for list_backup_plan_associations - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response - This property caches on the instance; repeated calls return the same - client. + def pre_list_backup_plans( + self, + request: backupplan.ListBackupPlansRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplan.ListBackupPlansRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_plans + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}/operations", - }, - ], - } + return request, metadata - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1", - ) + def post_list_backup_plans( + self, response: backupplan.ListBackupPlansResponse + ) -> backupplan.ListBackupPlansResponse: + """Post-rpc interceptor for list_backup_plans - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport - ) + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_backups( + self, + request: backupvault.ListBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backups( + self, response: backupvault.ListBackupsResponse + ) -> backupvault.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_backup_vaults( + self, + request: backupvault.ListBackupVaultsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backup_vaults( + self, response: backupvault.ListBackupVaultsResponse + ) -> backupvault.ListBackupVaultsResponse: + """Post-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_data_sources( + self, + request: backupvault.ListDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_data_sources( + self, response: backupvault.ListDataSourcesResponse + ) -> backupvault.ListDataSourcesResponse: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_management_servers( + self, + request: backupdr.ListManagementServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_management_servers( + self, response: backupdr.ListManagementServersResponse + ) -> backupdr.ListManagementServersResponse: + """Post-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_restore_backup( + self, + request: backupvault.RestoreBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.RestoreBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_restore_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_trigger_backup( + self, + request: backupplanassociation.TriggerBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplanassociation.TriggerBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_trigger_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup( + self, + request: backupvault.UpdateBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup_vault( + self, + request: backupvault.UpdateBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_update_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_data_source( + self, + request: backupvault.UpdateDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_data_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_data_source( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_source + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BackupDRRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BackupDRRestInterceptor + + +class BackupDRRestTransport(BackupDRTransport): + """REST backend transport for BackupDR. + + The BackupDR Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "backupdr.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BackupDRRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'backupdr.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or BackupDRRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _CreateBackupPlan(BackupDRRestStub):
+        def __hash__(self):
+            return hash("CreateBackupPlan")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "backupPlanId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: backupplan.CreateBackupPlanRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the create backup plan method over HTTP.
+
+            Args:
+                request (~.backupplan.CreateBackupPlanRequest):
+                    The request object. The request message for creating a ``BackupPlan``.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + "body": "backup_plan", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan( + request, metadata + ) + pb_request = backupplan.CreateBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan(resp) + return resp + + class _CreateBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupPlanAssociationId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.CreateBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.CreateBackupPlanAssociationRequest): + The request object. Request message for creating a backup + plan. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + "body": "backup_plan_association", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan_association(resp) + return resp + + class _CreateBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupVaultId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.CreateBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup vault method over HTTP. + + Args: + request (~.backupvault.CreateBackupVaultRequest): + The request object. Message for creating a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + "body": "backup_vault", + }, + ] + request, metadata = self._interceptor.pre_create_backup_vault( + request, metadata + ) + pb_request = backupvault.CreateBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_vault(resp) + return resp + + class _CreateManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("CreateManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "managementServerId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.CreateManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create management server method over HTTP. + + Args: + request (~.backupdr.CreateManagementServerRequest): + The request object. Request message for creating a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "body": "management_server", + }, + ] + request, metadata = self._interceptor.pre_create_management_server( + request, metadata + ) + pb_request = backupdr.CreateManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_management_server(resp) + return resp + + class _DeleteBackup(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.backupvault.DeleteBackupRequest): + The request object. Message for deleting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = backupvault.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup(resp) + return resp + + class _DeleteBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.DeleteBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan method over HTTP. + + Args: + request (~.backupplan.DeleteBackupPlanRequest): + The request object. The request message for deleting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan( + request, metadata + ) + pb_request = backupplan.DeleteBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan(resp) + return resp + + class _DeleteBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.DeleteBackupPlanAssociationRequest): + The request object. Request message for deleting a backup + plan association. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan_association(resp) + return resp + + class _DeleteBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup vault method over HTTP. + + Args: + request (~.backupvault.DeleteBackupVaultRequest): + The request object. Message for deleting a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_vault( + request, metadata + ) + pb_request = backupvault.DeleteBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_vault(resp) + return resp + + class _DeleteManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("DeleteManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.DeleteManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete management server method over HTTP. + + Args: + request (~.backupdr.DeleteManagementServerRequest): + The request object. Request message for deleting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_management_server( + request, metadata + ) + pb_request = backupdr.DeleteManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_management_server(resp) + return resp + + class _FetchUsableBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("FetchUsableBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.FetchUsableBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.FetchUsableBackupVaultsResponse: + r"""Call the fetch usable backup + vaults method over HTTP. + + Args: + request (~.backupvault.FetchUsableBackupVaultsRequest): + The request object. Request message for fetching usable + BackupVaults. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.FetchUsableBackupVaultsResponse: + Response message for fetching usable + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable", + }, + ] + request, metadata = self._interceptor.pre_fetch_usable_backup_vaults( + request, metadata + ) + pb_request = backupvault.FetchUsableBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.FetchUsableBackupVaultsResponse() + pb_resp = backupvault.FetchUsableBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_usable_backup_vaults(resp) + return resp + + class _GetBackup(BackupDRRestStub): + def __hash__(self): + return hash("GetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.backupvault.GetBackupRequest): + The request object. Request message for getting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.Backup: + Message describing a Backup object. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = backupvault.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.Backup() + pb_resp = backupvault.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.GetBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Call the get backup plan method over HTTP. + + Args: + request (~.backupplan.GetBackupPlanRequest): + The request object. The request message for getting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.BackupPlan: + A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` + messages. Each ``BackupRule`` has a retention policy and + defines a schedule by which the system is to perform + backup workloads. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan(request, metadata) + pb_request = backupplan.GetBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.BackupPlan() + pb_resp = backupplan.BackupPlan.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan(resp) + return resp + + class _GetBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.GetBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Call the get backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.GetBackupPlanAssociationRequest): + The request object. Request message for getting a + BackupPlanAssociation resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.GetBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.BackupPlanAssociation() + pb_resp = backupplanassociation.BackupPlanAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan_association(resp) + return resp + + class _GetBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Call the get backup vault method over HTTP. + + Args: + request (~.backupvault.GetBackupVaultRequest): + The request object. Request message for getting a + BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.BackupVault: + Message describing a BackupVault + object. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_vault( + request, metadata + ) + pb_request = backupvault.GetBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.BackupVault() + pb_resp = backupvault.BackupVault.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_vault(resp) + return resp + + class _GetDataSource(BackupDRRestStub): + def __hash__(self): + return hash("GetDataSource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetDataSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Call the get data source method over HTTP. + + Args: + request (~.backupvault.GetDataSourceRequest): + The request object. Request message for getting a + DataSource instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}", + }, + ] + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + pb_request = backupvault.GetDataSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.DataSource() + pb_resp = backupvault.DataSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_data_source(resp) + return resp + + class _GetManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("GetManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.GetManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ManagementServer: + r"""Call the get management server method over HTTP. + + Args: + request (~.backupdr.GetManagementServerRequest): + The request object. Request message for getting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ManagementServer: + ManagementServer describes a single + BackupDR ManagementServer instance. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_management_server( + request, metadata + ) + pb_request = backupdr.GetManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ManagementServer() + pb_resp = backupdr.ManagementServer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_management_server(resp) + return resp + + class _ListBackupPlanAssociations(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlanAssociations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + r"""Call the list backup plan + associations method over HTTP. + + Args: + request (~.backupplanassociation.ListBackupPlanAssociationsRequest): + The request object. Request message for List + BackupPlanAssociation + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.ListBackupPlanAssociationsResponse: + Response message for List + BackupPlanAssociation + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plan_associations( + request, metadata + ) + pb_request = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.ListBackupPlanAssociationsResponse() + pb_resp = backupplanassociation.ListBackupPlanAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plan_associations(resp) + return resp + + class _ListBackupPlans(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlans") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.ListBackupPlansRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.ListBackupPlansResponse: + r"""Call the list backup plans method over HTTP. + + Args: + request (~.backupplan.ListBackupPlansRequest): + The request object. The request message for getting a list ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.ListBackupPlansResponse: + The response message for getting a list of + ``BackupPlan``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plans( + request, metadata + ) + pb_request = backupplan.ListBackupPlansRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.ListBackupPlansResponse() + pb_resp = backupplan.ListBackupPlansResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plans(resp) + return resp + + class _ListBackups(BackupDRRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backupvault.ListBackupsRequest): + The request object. Request message for listing Backups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupsResponse: + Response message for listing Backups. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = backupvault.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupsResponse() + pb_resp = backupvault.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupVaultsResponse: + r"""Call the list backup vaults method over HTTP. + + Args: + request (~.backupvault.ListBackupVaultsRequest): + The request object. Request message for listing + backupvault stores. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupVaultsResponse: + Response message for listing + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + }, + ] + request, metadata = self._interceptor.pre_list_backup_vaults( + request, metadata + ) + pb_request = backupvault.ListBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupVaultsResponse() + pb_resp = backupvault.ListBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_vaults(resp) + return resp + + class _ListDataSources(BackupDRRestStub): + def __hash__(self): + return hash("ListDataSources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. + + Args: + request (~.backupvault.ListDataSourcesRequest): + The request object. Request message for listing + DataSources. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListDataSourcesResponse: + Response message for listing + DataSources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources", + }, + ] + request, metadata = self._interceptor.pre_list_data_sources( + request, metadata + ) + pb_request = backupvault.ListDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListDataSourcesResponse() + pb_resp = backupvault.ListDataSourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_data_sources(resp) + return resp + + class _ListManagementServers(BackupDRRestStub): + def __hash__(self): + return hash("ListManagementServers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.ListManagementServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ListManagementServersResponse: + r"""Call the list management servers method over HTTP. + + Args: + request (~.backupdr.ListManagementServersRequest): + The request object. Request message for listing + management servers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ListManagementServersResponse: + Response message for listing + management servers. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + }, + ] + request, metadata = self._interceptor.pre_list_management_servers( + request, metadata + ) + pb_request = backupdr.ListManagementServersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ListManagementServersResponse() + pb_resp = backupdr.ListManagementServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_management_servers(resp) + return resp + + class _RestoreBackup(BackupDRRestStub): + def __hash__(self): + return hash("RestoreBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.RestoreBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore backup method over HTTP. + + Args: + request (~.backupvault.RestoreBackupRequest): + The request object. Request message for restoring from a + Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_backup(request, metadata) + pb_request = backupvault.RestoreBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - # Return the client from cache. - return self._operations_client + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_backup(resp) + return resp - class _CreateManagementServer(BackupDRRestStub): + class _TriggerBackup(BackupDRRestStub): def __hash__(self): - return hash("CreateManagementServer") + return hash("TriggerBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "managementServerId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -572,18 +3393,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.CreateManagementServerRequest, + request: backupplanassociation.TriggerBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create management server method over HTTP. + r"""Call the trigger backup method over HTTP. Args: - request (~.backupdr.CreateManagementServerRequest): - The request object. Request message for creating a - management server instance. + request (~.backupplanassociation.TriggerBackupRequest): + The request object. Request message for triggering a + backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -601,14 +3422,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", - "body": "management_server", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup", + "body": "*", }, ] - request, metadata = self._interceptor.pre_create_management_server( - request, metadata - ) - pb_request = backupdr.CreateManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_trigger_backup(request, metadata) + pb_request = backupplanassociation.TriggerBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -649,14 +3468,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_management_server(resp) + resp = self._interceptor.post_trigger_backup(resp) return resp - class _DeleteManagementServer(BackupDRRestStub): + class _UpdateBackup(BackupDRRestStub): def __hash__(self): - return hash("DeleteManagementServer") + return hash("UpdateBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -668,18 +3489,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.DeleteManagementServerRequest, + request: backupvault.UpdateBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete management server method over HTTP. + r"""Call the update backup method over HTTP. Args: - request (~.backupdr.DeleteManagementServerRequest): - The request object. Request message for deleting a - management server instance. + request (~.backupvault.UpdateBackupRequest): + The request object. Request message for updating a + Backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -696,16 +3517,20 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + "body": "backup", }, ] - request, metadata = self._interceptor.pre_delete_management_server( - request, metadata - ) - pb_request = backupdr.DeleteManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = backupvault.UpdateBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -728,6 +3553,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -738,14 +3564,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_management_server(resp) + resp = self._interceptor.post_update_backup(resp) return resp - class _GetManagementServer(BackupDRRestStub): + class _UpdateBackupVault(BackupDRRestStub): def __hash__(self): - return hash("GetManagementServer") + return hash("UpdateBackupVault") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -757,18 +3585,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.UpdateBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ManagementServer: - r"""Call the get management server method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update backup vault method over HTTP. Args: - request (~.backupdr.GetManagementServerRequest): - The request object. Request message for getting a - management server instance. + request (~.backupvault.UpdateBackupVaultRequest): + The request object. Request message for updating a + BackupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -776,24 +3604,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ManagementServer: - ManagementServer describes a single - BackupDR ManagementServer instance. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
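The _UpdateBackup hunk above sends a PATCH whose body is the backup subfield, with updateMask as a required query parameter. A hedged sketch of the corresponding client call follows (assuming the usual backup/update_mask flattened arguments); the resource name and updated field are placeholders, and the snippet is not part of the patch.

# Illustrative sketch, not part of the patch. Names and fields are placeholders.
from google.cloud import backupdr_v1
from google.protobuf import field_mask_pb2

client = backupdr_v1.BackupDRClient(transport="rest")
backup = backupdr_v1.Backup(
    name="projects/my-project/locations/us-central1/backupVaults/bv/dataSources/ds/backups/b1",
    description="weekly snapshot",
)
operation = client.update_backup(
    backup=backup,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
updated = operation.result()  # resolves to the updated Backup once the LRO completes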
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}", + "body": "backup_vault", }, ] - request, metadata = self._interceptor.pre_get_management_server( + request, metadata = self._interceptor.pre_update_backup_vault( request, metadata ) - pb_request = backupdr.GetManagementServerRequest.pb(request) + pb_request = backupvault.UpdateBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -816,6 +3651,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -824,18 +3660,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ManagementServer() - pb_resp = backupdr.ManagementServer.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_management_server(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup_vault(resp) return resp - class _ListManagementServers(BackupDRRestStub): + class _UpdateDataSource(BackupDRRestStub): def __hash__(self): - return hash("ListManagementServers") + return hash("UpdateDataSource") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -847,18 +3683,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.ListManagementServersRequest, + request: backupvault.UpdateDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ListManagementServersResponse: - r"""Call the list management servers method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update data source method over HTTP. Args: - request (~.backupdr.ListManagementServersRequest): - The request object. Request message for listing - management servers. + request (~.backupvault.UpdateDataSourceRequest): + The request object. Request message for updating a data + source instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -866,24 +3702,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ListManagementServersResponse: - Response message for listing - management servers. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "method": "patch", + "uri": "/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}", + "body": "data_source", }, ] - request, metadata = self._interceptor.pre_list_management_servers( + request, metadata = self._interceptor.pre_update_data_source( request, metadata ) - pb_request = backupdr.ListManagementServersRequest.pb(request) + pb_request = backupvault.UpdateDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -906,6 +3749,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -914,13 +3758,38 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ListManagementServersResponse() - pb_resp = backupdr.ListManagementServersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_management_servers(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_data_source(resp) return resp + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def create_management_server( self, @@ -929,6 +3798,41 @@ def create_management_server( # In C++ this would require a dynamic_cast return self._CreateManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def delete_management_server( self, @@ -937,6 +3841,60 @@ def delete_management_server( # In C++ this would require a dynamic_cast return self._DeleteManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchUsableBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_management_server( self, @@ -945,6 +3903,55 @@ def get_management_server( # In C++ this would require a dynamic_cast return self._GetManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlanAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlans(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore + @property def list_management_servers( self, @@ -955,6 +3962,48 @@ def list_management_servers( # In C++ this would require a dynamic_cast return self._ListManagementServers(self._session, self._host, self._interceptor) # type: ignore + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
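The properties in this block expose each stub as a callable on the REST transport; the client methods go through them. Purely as an illustrative sketch (not part of the patch, and assuming the client's usual transport attribute), one of them can also be reached directly; the resource name is a placeholder.

# Illustrative sketch, not part of the patch. The resource name is a placeholder.
from google.cloud import backupdr_v1
from google.cloud.backupdr_v1.types import backupvault

client = backupdr_v1.BackupDRClient(transport="rest")
request = backupvault.GetBackupVaultRequest(
    name="projects/my-project/locations/us-central1/backupVaults/my-vault"
)
# client.transport is the REST transport; its get_backup_vault property
# returns the _GetBackupVault stub defined above.
vault = client.transport.get_backup_vault(request)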
+ # In C++ this would require a dynamic_cast + return self._TriggerBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index 3afc31268ba2..951186d655ee 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -26,6 +26,92 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from .backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .backupvault_ba import BackupApplianceBackupProperties +from .backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + 
ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "CreateManagementServerRequest", @@ -39,4 +125,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py index 07ad09a753bc..2cbce001d7d5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py @@ -195,7 +195,7 @@ class ManagementServer(proto.Message): oauth2_client_id (str): Output only. The OAuth 2.0 client id is required to make API calls to the BackupDR instance API of this ManagementServer. - This is the value that should be provided in the ‘aud’ field + This is the value that should be provided in the 'aud' field of the OIDC ID Token (see openid specification https://openid.net/specs/openid-connect-core-1_0.html#IDToken). workforce_identity_based_oauth2_client_id (google.cloud.backupdr_v1.types.WorkforceIdentityBasedOAuth2ClientID): @@ -350,10 +350,10 @@ class ListManagementServersRequest(proto.Message): parent (str): Required. 
The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for example **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + locations, use "-" for the '{location}' value. page_size (int): Optional. Requested page size. Server may return fewer items than requested. If @@ -401,15 +401,16 @@ class ListManagementServersResponse(proto.Message): Attributes: management_servers (MutableSequence[google.cloud.backupdr_v1.types.ManagementServer]): - The list of ManagementServer instances in the project for - the specified location. - - If the ``{location}`` value in the request is "-", the - response contains a list of instances from all locations. In - case any location is unreachable, the response will only - return management servers in reachable locations and the - 'unreachable' field will be populated with a list of - unreachable locations. + The list of ManagementServer instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + management servers in reachable locations and + the 'unreachable' field will be populated with a + list of unreachable locations. next_page_token (str): A token identifying a page of results the server should return. @@ -443,7 +444,7 @@ class GetManagementServerRequest(proto.Message): name (str): Required. Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' """ name: str = proto.Field( @@ -458,9 +459,9 @@ class CreateManagementServerRequest(proto.Message): Attributes: parent (str): Required. The management server project and location in the - format ``projects/{project_id}/locations/{location}``. In - Cloud Backup and DR locations map to GCP regions, for - example **us-central1**. + format 'projects/{project_id}/locations/{location}'. In + Cloud Backup and DR locations map to Google Cloud regions, + for example **us-central1**. management_server_id (str): Required. The name of the management server to create. The name must be unique for the @@ -571,7 +572,7 @@ class OperationMetadata(proto.Message): cancellation of the operation. Operations that have successfully been cancelled have [Operation.error][] value with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. + 1, corresponding to 'Code.CANCELLED'. api_version (str): Output only. API version used to start the operation. diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py new file mode 100644 index 000000000000..ca0eabb6d9f1 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlan", + "BackupRule", + "StandardSchedule", + "BackupWindow", + "WeekDayOfMonth", + "CreateBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "GetBackupPlanRequest", + "DeleteBackupPlanRequest", + }, +) + + +class BackupPlan(proto.Message): + r"""A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` messages. Each + ``BackupRule`` has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + ``BackupPlan``. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + description (str): + Optional. The description of the ``BackupPlan`` resource. + + The description allows for additional details about + ``BackupPlan`` and its use cases to be provided. An example + description is the following: "This is a backup plan that + performs a daily backup at 6pm and retains data for 3 + months". The description must be at most 2048 characters. + labels (MutableMapping[str, str]): + Optional. This collection of key/value pairs + allows for custom labels to be supplied by the + user. Example, {"tag": "Weekly"}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was last updated. + backup_rules (MutableSequence[google.cloud.backupdr_v1.types.BackupRule]): + Required. The backup rules for this ``BackupPlan``. There + must be at least one ``BackupRule`` message. + state (google.cloud.backupdr_v1.types.BackupPlan.State): + Output only. The ``State`` for the ``BackupPlan``. + resource_type (str): + Required. The resource type to which the ``BackupPlan`` will + be applied. Examples include, + "compute.googleapis.com/Instance" and + "storage.googleapis.com/Bucket". + etag (str): + Optional. ``etag`` is returned from the service in the + response. As a user of the service, you may provide an etag + value in this field to prevent stale resources. + backup_vault (str): + Required. Resource name of backup vault which + will be used as storage location for backups. + Format: + + projects/{project}/locations/{location}/backupVaults/{backupvault} + backup_vault_service_account (str): + Output only. The Google Cloud Platform + Service Account to be used by the BackupVault + for taking backups. Specify the email address of + the Backup Vault Service Account. + """ + + class State(proto.Enum): + r"""``State`` enumerates the possible states for a ``BackupPlan``. + + Values: + STATE_UNSPECIFIED (0): + State not set. 
+ CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + backup_rules: MutableSequence["BackupRule"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="BackupRule", + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + resource_type: str = proto.Field( + proto.STRING, + number=8, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + backup_vault: str = proto.Field( + proto.STRING, + number=10, + ) + backup_vault_service_account: str = proto.Field( + proto.STRING, + number=11, + ) + + +class BackupRule(proto.Message): + r"""``BackupRule`` binds the backup schedule to a retention policy. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rule_id (str): + Required. Immutable. The unique id of this ``BackupRule``. + The ``rule_id`` is unique per ``BackupPlan``.The ``rule_id`` + must start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens. Pattern, + /[a-z][a-z0-9-]{,62}/. + backup_retention_days (int): + Required. Configures the duration for which + backup data will be kept. It is defined in + “days”. The value should be greater than or + equal to minimum enforced retention of the + backup vault. + standard_schedule (google.cloud.backupdr_v1.types.StandardSchedule): + Required. Defines a schedule that runs within + the confines of a defined window of time. + + This field is a member of `oneof`_ ``backup_schedule_oneof``. + """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + backup_retention_days: int = proto.Field( + proto.INT32, + number=4, + ) + standard_schedule: "StandardSchedule" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_schedule_oneof", + message="StandardSchedule", + ) + + +class StandardSchedule(proto.Message): + r"""``StandardSchedule`` defines a schedule that run within the confines + of a defined window of days. We can define recurrence type for + schedule as HOURLY, DAILY, WEEKLY, MONTHLY or YEARLY. + + Attributes: + recurrence_type (google.cloud.backupdr_v1.types.StandardSchedule.RecurrenceType): + Required. Specifies the ``RecurrenceType`` for the schedule. + hourly_frequency (int): + Optional. Specifies frequency for hourly backups. A hourly + frequency of 2 means jobs will run every 2 hours from start + time till end time defined. + + This is required for ``recurrence_type``, ``HOURLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``HOURLY``. + + Value of hourly frequency should be between 6 and 23. + + Reason for limit : We found that there is bandwidth + limitation of 3GB/S for GMI while taking a backup and 5GB/S + while doing a restore. 
Given the amount of parallel backups + and restore we are targeting, this will potentially take the + backup time to mins and hours (in worst case scenario). + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Specifies days of week like, MONDAY or TUESDAY, on + which jobs will run. + + This is required for ``recurrence_type``, ``WEEKLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``WEEKLY``. + days_of_month (MutableSequence[int]): + Optional. Specifies days of months like 1, 5, or 14 on which + jobs will run. + + Values for ``days_of_month`` are only applicable for + ``recurrence_type``, ``MONTHLY`` and ``YEARLY``. A + validation error will occur if other values are supplied. + week_day_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth): + Optional. Specifies a week day of the month like, FIRST + SUNDAY or LAST MONDAY, on which jobs will run. This will be + specified by two fields in ``WeekDayOfMonth``, one for the + day, e.g. ``MONDAY``, and one for the week, e.g. ``LAST``. + + This field is only applicable for ``recurrence_type``, + ``MONTHLY`` and ``YEARLY``. A validation error will occur if + other values are supplied. + months (MutableSequence[google.type.month_pb2.Month]): + Optional. Specifies the months of year, like ``FEBRUARY`` + and/or ``MAY``, on which jobs will run. + + This field is only applicable when ``recurrence_type`` is + ``YEARLY``. A validation error will occur if other values + are supplied. + backup_window (google.cloud.backupdr_v1.types.BackupWindow): + Required. A BackupWindow defines the window of day during + which backup jobs will run. Jobs are queued at the beginning + of the window and will be marked as ``NOT_RUN`` if they do + not start by the end of the window. + + Note: running jobs will not be cancelled at the end of the + window. + time_zone (str): + Required. The time zone to be used when interpreting the + schedule. The value of this field must be a time zone name + from the IANA tz database. See + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones + for the list of valid timezone names. For e.g., + Europe/Paris. + """ + + class RecurrenceType(proto.Enum): + r"""``RecurrenceTypes`` enumerates the applicable periodicity for the + schedule. + + Values: + RECURRENCE_TYPE_UNSPECIFIED (0): + recurrence type not set + HOURLY (1): + The ``BackupRule`` is to be applied hourly. + DAILY (2): + The ``BackupRule`` is to be applied daily. + WEEKLY (3): + The ``BackupRule`` is to be applied weekly. + MONTHLY (4): + The ``BackupRule`` is to be applied monthly. + YEARLY (5): + The ``BackupRule`` is to be applied yearly. 
+ """
+ RECURRENCE_TYPE_UNSPECIFIED = 0
+ HOURLY = 1
+ DAILY = 2
+ WEEKLY = 3
+ MONTHLY = 4
+ YEARLY = 5
+
+ recurrence_type: RecurrenceType = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum=RecurrenceType,
+ )
+ hourly_frequency: int = proto.Field(
+ proto.INT32,
+ number=2,
+ )
+ days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField(
+ proto.ENUM,
+ number=3,
+ enum=dayofweek_pb2.DayOfWeek,
+ )
+ days_of_month: MutableSequence[int] = proto.RepeatedField(
+ proto.INT32,
+ number=4,
+ )
+ week_day_of_month: "WeekDayOfMonth" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message="WeekDayOfMonth",
+ )
+ months: MutableSequence[month_pb2.Month] = proto.RepeatedField(
+ proto.ENUM,
+ number=6,
+ enum=month_pb2.Month,
+ )
+ backup_window: "BackupWindow" = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ message="BackupWindow",
+ )
+ time_zone: str = proto.Field(
+ proto.STRING,
+ number=8,
+ )
+
+
+class BackupWindow(proto.Message):
+ r"""``BackupWindow`` defines a window of the day during which backup
+ jobs will run.
+
+ Attributes:
+ start_hour_of_day (int):
+ Required. The hour of day (0-23) when the
+ window starts. For example, if the start hour
+ of day is 6, the backup window starts at 6:00.
+ end_hour_of_day (int):
+ Required. The hour of day (1-24) when the window ends. For
+ example, if the end hour of day is 10, the backup window
+ ends at 10:00.
+
+ The end hour of day must be greater than the start hour of
+ day: 0 <= start_hour_of_day < end_hour_of_day <= 24.
+
+ The end hour of day is not included in the backup window,
+ meaning that if end_hour_of_day = 10, jobs must start
+ before 10:00.
+ """
+
+ start_hour_of_day: int = proto.Field(
+ proto.INT32,
+ number=1,
+ )
+ end_hour_of_day: int = proto.Field(
+ proto.INT32,
+ number=2,
+ )
+
+
+class WeekDayOfMonth(proto.Message):
+ r"""``WeekDayOfMonth`` defines the week day of the month on which the
+ backups will run. The message combines a ``WeekOfMonth`` and
+ ``DayOfWeek`` to produce values like ``FIRST``/``MONDAY`` or
+ ``LAST``/``FRIDAY``.
+
+ Attributes:
+ week_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth.WeekOfMonth):
+ Required. Specifies the week of the month.
+ day_of_week (google.type.dayofweek_pb2.DayOfWeek):
+ Required. Specifies the day of the week.
+ """
+
+ class WeekOfMonth(proto.Enum):
+ r"""``WeekOfMonth`` enumerates possible weeks in the month, e.g. the
+ first, third, or last week of the month.
+
+ Values:
+ WEEK_OF_MONTH_UNSPECIFIED (0):
+ The zero value. Do not use.
+ FIRST (1):
+ The first week of the month.
+ SECOND (2):
+ The second week of the month.
+ THIRD (3):
+ The third week of the month.
+ FOURTH (4):
+ The fourth week of the month.
+ LAST (5):
+ The last week of the month.
+ """
+ WEEK_OF_MONTH_UNSPECIFIED = 0
+ FIRST = 1
+ SECOND = 2
+ THIRD = 3
+ FOURTH = 4
+ LAST = 5
+
+ week_of_month: WeekOfMonth = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum=WeekOfMonth,
+ )
+ day_of_week: dayofweek_pb2.DayOfWeek = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum=dayofweek_pb2.DayOfWeek,
+ )
+
+
+class CreateBackupPlanRequest(proto.Message):
+ r"""The request message for creating a ``BackupPlan``.
+
+ Attributes:
+ parent (str):
+ Required. The ``BackupPlan`` project and location in the
+ format ``projects/{project}/locations/{location}``. In Cloud
+ BackupDR, locations map to GCP regions, for example
+ **us-central1**.
+ backup_plan_id (str):
+ Required. The name of the ``BackupPlan`` to create.
The name
+ must be unique for the specified project and location. The
+ name must start with a lowercase letter followed by up to 62
+ lowercase letters, numbers, or hyphens. Pattern,
+ /[a-z][a-z0-9-]{,62}/.
+ backup_plan (google.cloud.backupdr_v1.types.BackupPlan):
+ Required. The ``BackupPlan`` resource object to create.
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+
+ For example, consider a situation where you make
+ an initial request and the request times out.
+ If you make the request again with the same
+ request ID, the server can check if the original
+ operation with the same request ID was received,
+ and if so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ backup_plan_id: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ backup_plan: "BackupPlan" = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message="BackupPlan",
+ )
+ request_id: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+
+
+class ListBackupPlansRequest(proto.Message):
+ r"""The request message for getting a list of ``BackupPlan`` resources.
+
+ Attributes:
+ parent (str):
+ Required. The project and location for which to retrieve
+ ``BackupPlans`` information. Format:
+ ``projects/{project}/locations/{location}``. In Cloud
+ BackupDR, locations map to GCP regions, for example
+ **us-central1**. To retrieve backup plans for all locations,
+ use "-" for the ``{location}`` value.
+ page_size (int):
+ Optional. The maximum number of ``BackupPlans`` to return in
+ a single response. If not specified, a default value will be
+ chosen by the service. Note that the response may include a
+ partial list and a caller should only rely on the response's
+ [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token]
+ to determine if there are more instances left to be queried.
+ page_token (str):
+ Optional. The value of
+ [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token]
+ received from a previous ``ListBackupPlans`` call. Provide
+ this to retrieve the subsequent page in a multi-page list of
+ results. When paginating, all other parameters provided to
+ ``ListBackupPlans`` must match the call that provided the
+ page token.
+ filter (str):
+ Optional. Field match expression used to
+ filter the results.
+ order_by (str):
+ Optional. Field by which to sort the results.
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ page_size: int = proto.Field(
+ proto.INT32,
+ number=2,
+ )
+ page_token: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ filter: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ order_by: str = proto.Field(
+ proto.STRING,
+ number=5,
+ )
+
+
+class ListBackupPlansResponse(proto.Message):
+ r"""The response message for getting a list of ``BackupPlan``.
+
+ Attributes:
+ backup_plans (MutableSequence[google.cloud.backupdr_v1.types.BackupPlan]):
+ The list of ``BackupPlans`` in the project for the specified
+ location.
+ + If the ``{location}`` value in the request is "-", the + response contains a list of resources from all locations. In + case any location is unreachable, the response will only + return backup plans in reachable locations and the + 'unreachable' field will be populated with a list of + unreachable locations. BackupPlan + next_page_token (str): + A token which may be sent as + [page_token][google.cloud.backupdr.v1.ListBackupPlansRequest.page_token] + in a subsequent ``ListBackupPlans`` call to retrieve the + next page of results. If this field is omitted or empty, + then there are no more results to return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plans: MutableSequence["BackupPlan"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlan", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanRequest(proto.Message): + r"""The request message for getting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanRequest(proto.Message): + r"""The request message for deleting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py new file mode 100644 index 000000000000..23a4309a3fd8 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlanAssociation", + "RuleConfigInfo", + "CreateBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "GetBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "TriggerBackupRequest", + }, +) + + +class BackupPlanAssociation(proto.Message): + r"""A BackupPlanAssociation represents a single + BackupPlanAssociation which contains details like workload, + backup plan etc + + Attributes: + name (str): + Output only. Identifier. The resource name of + BackupPlanAssociation in below format Format : + + projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId} + resource_type (str): + Optional. Resource type of workload on which + backupplan is applied + resource (str): + Required. Immutable. Resource name of + workload on which backupplan is applied + backup_plan (str): + Required. Resource name of backup plan which + needs to be applied on workload. Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + state (google.cloud.backupdr_v1.types.BackupPlanAssociation.State): + Output only. The BackupPlanAssociation + resource state. + rules_config_info (MutableSequence[google.cloud.backupdr_v1.types.RuleConfigInfo]): + Output only. The config info related to + backup rules. + data_source (str): + Output only. Output Only. + + Resource name of data source which will be used + as storage location for backups taken. + Format : + + projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource} + """ + + class State(proto.Enum): + r"""Enum for State of BackupPlan Association + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + resource: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + rules_config_info: MutableSequence["RuleConfigInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="RuleConfigInfo", + ) + data_source: str = proto.Field( + proto.STRING, + number=9, + ) + + +class RuleConfigInfo(proto.Message): + r"""Message for rules config info. 
+
+ Attributes:
+ rule_id (str):
+ Output only. Backup Rule id fetched from
+ backup plan.
+ last_backup_state (google.cloud.backupdr_v1.types.RuleConfigInfo.LastBackupState):
+ Output only. The last backup state for rule.
+ last_backup_error (google.rpc.status_pb2.Status):
+ Output only. google.rpc.Status object to
+ store the last backup error.
+ last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The point in time when the last
+ successful backup was captured from the source.
+ """
+
+ class LastBackupState(proto.Enum):
+ r"""Enum for LastBackupState
+
+ Values:
+ LAST_BACKUP_STATE_UNSPECIFIED (0):
+ State not set.
+ FIRST_BACKUP_PENDING (1):
+ The first backup is pending.
+ PERMISSION_DENIED (2):
+ The most recent backup could not be
+ run/failed because of the lack of permissions.
+ SUCCEEDED (3):
+ The last backup operation succeeded.
+ FAILED (4):
+ The last backup operation failed.
+ """
+ LAST_BACKUP_STATE_UNSPECIFIED = 0
+ FIRST_BACKUP_PENDING = 1
+ PERMISSION_DENIED = 2
+ SUCCEEDED = 3
+ FAILED = 4
+
+ rule_id: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ last_backup_state: LastBackupState = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum=LastBackupState,
+ )
+ last_backup_error: status_pb2.Status = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=status_pb2.Status,
+ )
+ last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message=timestamp_pb2.Timestamp,
+ )
+
+
+class CreateBackupPlanAssociationRequest(proto.Message):
+ r"""Request message for creating a backup plan association.
+
+ Attributes:
+ parent (str):
+ Required. The backup plan association project and location
+ in the format
+ ``projects/{project_id}/locations/{location}``. In Cloud
+ BackupDR, locations map to GCP regions, for example
+ **us-central1**.
+ backup_plan_association_id (str):
+ Required. The name of the backup plan
+ association to create. The name must be unique
+ for the specified project and location.
+ backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation):
+ Required. The resource being created.
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+
+ For example, consider a situation where you make
+ an initial request and the request times out.
+ If you make the request again with the same
+ request ID, the server can check if the original
+ operation with the same request ID was received,
+ and if so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ backup_plan_association_id: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ backup_plan_association: "BackupPlanAssociation" = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message="BackupPlanAssociation",
+ )
+ request_id: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+
+
+class ListBackupPlanAssociationsRequest(proto.Message):
+ r"""Request message for List BackupPlanAssociation
+
+ Attributes:
+ parent (str):
+ Required.
The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations for + all locations, use "-" for the ``{location}`` value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlanAssociationsResponse(proto.Message): + r"""Response message for List BackupPlanAssociation + + Attributes: + backup_plan_associations (MutableSequence[google.cloud.backupdr_v1.types.BackupPlanAssociation]): + The list of Backup Plan Associations in the project for the + specified location. + + If the ``{location}`` value in the request is "-", the + response contains a list of instances from all locations. In + case any location is unreachable, the response will only + return backup plan associations in reachable locations and + the 'unreachable' field will be populated with a list of + unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plan_associations: MutableSequence[ + "BackupPlanAssociation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlanAssociation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanAssociationRequest(proto.Message): + r"""Request message for getting a BackupPlanAssociation resource. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanAssociationRequest(proto.Message): + r"""Request message for deleting a backup plan association. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TriggerBackupRequest(proto.Message): + r"""Request message for triggering a backup. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py new file mode 100644 index 000000000000..ced3cd195702 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py @@ -0,0 +1,2065 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.backupdr_v1.types import backupvault_ba, backupvault_gce + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupConfigState", + "BackupView", + "BackupVaultView", + "BackupVault", + "DataSource", + "BackupConfigInfo", + "GcpBackupConfig", + "BackupApplianceBackupConfig", + "DataSourceGcpResource", + "DataSourceBackupApplianceApplication", + "ServiceLockInfo", + "BackupApplianceLockInfo", + "BackupLock", + "Backup", + "CreateBackupVaultRequest", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GetBackupVaultRequest", + "UpdateBackupVaultRequest", + "DeleteBackupVaultRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "GetDataSourceRequest", + "UpdateDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "UpdateBackupRequest", + "DeleteBackupRequest", + "RestoreBackupRequest", + "RestoreBackupResponse", + "TargetResource", + "GcpResource", + }, +) + + +class BackupConfigState(proto.Enum): + r"""Backup configuration state. Is the resource configured for + backup? + + Values: + BACKUP_CONFIG_STATE_UNSPECIFIED (0): + The possible states of backup configuration. + Status not set. + ACTIVE (1): + The data source is actively protected (i.e. + there is a BackupPlanAssociation or Appliance + SLA pointing to it) + PASSIVE (2): + The data source is no longer protected (but + may have backups under it) + """ + BACKUP_CONFIG_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PASSIVE = 2 + + +class BackupView(proto.Enum): + r"""BackupView contains enum options for Partial and Full view. + + Values: + BACKUP_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VIEW_BASIC (1): + Includes basic data about the Backup, but not + the full contents. + BACKUP_VIEW_FULL (2): + Includes all data about the Backup. + This is the default value (for both ListBackups + and GetBackup). + """ + BACKUP_VIEW_UNSPECIFIED = 0 + BACKUP_VIEW_BASIC = 1 + BACKUP_VIEW_FULL = 2 + + +class BackupVaultView(proto.Enum): + r"""BackupVaultView contains enum options for Partial and Full + view. + + Values: + BACKUP_VAULT_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VAULT_VIEW_BASIC (1): + Includes basic data about the Backup Vault, + but not the full contents. + BACKUP_VAULT_VIEW_FULL (2): + Includes all data about the Backup Vault. + This is the default value (for both + ListBackupVaults and GetBackupVault). + """ + BACKUP_VAULT_VIEW_UNSPECIFIED = 0 + BACKUP_VAULT_VIEW_BASIC = 1 + BACKUP_VAULT_VIEW_FULL = 2 + + +class BackupVault(proto.Message): + r"""Message describing a BackupVault object. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup vault to create. + It must have the + format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}"``. + ``{backupvault}`` cannot be changed after creation. 
It must
+ be between 3-63 characters long and must be unique within
+ the project and location.
+ description (str):
+ Optional. The description of the BackupVault
+ instance (2048 characters or less).
+
+ This field is a member of `oneof`_ ``_description``.
+ labels (MutableMapping[str, str]):
+ Optional. Resource labels to represent user
+ provided metadata. No labels currently defined.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The time when the instance was
+ created.
+
+ This field is a member of `oneof`_ ``_create_time``.
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The time when the instance was
+ updated.
+
+ This field is a member of `oneof`_ ``_update_time``.
+ backup_minimum_enforced_retention_duration (google.protobuf.duration_pb2.Duration):
+ Required. The default and minimum enforced
+ retention for each backup within the backup
+ vault. The enforced retention for each backup
+ can be extended.
+
+ This field is a member of `oneof`_ ``_backup_minimum_enforced_retention_duration``.
+ deletable (bool):
+ Output only. Set to true when there are no
+ backups nested under this resource.
+
+ This field is a member of `oneof`_ ``_deletable``.
+ etag (str):
+ Optional. Server specified ETag for the
+ backup vault resource to prevent simultaneous
+ updates from overwriting each other.
+
+ This field is a member of `oneof`_ ``_etag``.
+ state (google.cloud.backupdr_v1.types.BackupVault.State):
+ Output only. The BackupVault resource
+ instance state.
+ effective_time (google.protobuf.timestamp_pb2.Timestamp):
+ Optional. Time after which the BackupVault
+ resource is locked.
+
+ This field is a member of `oneof`_ ``_effective_time``.
+ backup_count (int):
+ Output only. The number of backups in this
+ backup vault.
+ service_account (str):
+ Output only. Service account used by the
+ BackupVault Service for this BackupVault. The
+ user should grant this account permissions in
+ their workload project to enable the service to
+ run backups and restores there.
+ total_stored_bytes (int):
+ Output only. Total size of the storage used
+ by all backup resources.
+ uid (str):
+ Output only. Immutable after resource
+ creation until resource deletion.
+ annotations (MutableMapping[str, str]):
+ Optional. User annotations. See
+ https://google.aip.dev/128#annotations. Stores
+ small amounts of arbitrary data.
+ access_restriction (google.cloud.backupdr_v1.types.BackupVault.AccessRestriction):
+ Optional. Note: This field is added for a future use case
+ and will not be supported in the current release.
+
+ Access restriction for the backup vault. Default value is
+ WITHIN_ORGANIZATION if not provided during creation.
+ """
+
+ class State(proto.Enum):
+ r"""Holds the state of the backup vault resource.
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ State not set.
+ CREATING (1):
+ The backup vault is being created.
+ ACTIVE (2):
+ The backup vault has been created and is
+ fully usable.
+ DELETING (3):
+ The backup vault is being deleted.
+ ERROR (4):
+ The backup vault is experiencing an issue and
+ might be unusable.
+ """
+ STATE_UNSPECIFIED = 0
+ CREATING = 1
+ ACTIVE = 2
+ DELETING = 3
+ ERROR = 4
+
+ class AccessRestriction(proto.Enum):
+ r"""Holds the access restriction for the backup vault.
+
+ Values:
+ ACCESS_RESTRICTION_UNSPECIFIED (0):
+ Access restriction not set.
+ WITHIN_PROJECT (1):
+ Access to or from resources outside your
+ current project will be denied.
+ WITHIN_ORGANIZATION (2): + Access to or from resources outside your + current organization will be denied. + UNRESTRICTED (3): + No access restriction. + """ + ACCESS_RESTRICTION_UNSPECIFIED = 0 + WITHIN_PROJECT = 1 + WITHIN_ORGANIZATION = 2 + UNRESTRICTED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_minimum_enforced_retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=20, + optional=True, + message=duration_pb2.Duration, + ) + deletable: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + effective_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=17, + ) + service_account: str = proto.Field( + proto.STRING, + number=18, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=19, + ) + uid: str = proto.Field( + proto.STRING, + number=21, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=22, + ) + access_restriction: AccessRestriction = proto.Field( + proto.ENUM, + number=24, + enum=AccessRestriction, + ) + + +class DataSource(proto.Message): + r"""Message describing a DataSource object. + Datasource object used to represent Datasource details for both + admin and basic view. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the datasource to create. + It must have the + format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}"``. + ``{datasource}`` cannot be changed after creation. It must + be between 3-63 characters long and must be unique within + the backup vault. + state (google.cloud.backupdr_v1.types.DataSource.State): + Output only. The DataSource resource instance + state. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + backup_count (int): + Number of backups in the data source. + + This field is a member of `oneof`_ ``_backup_count``. + etag (str): + Server specified ETag for the + ManagementServer resource to prevent + simultaneous updates from overwiting each other. 
+ + This field is a member of `oneof`_ ``_etag``. + total_stored_bytes (int): + The number of bytes (metadata and data) + stored in this datasource. + + This field is a member of `oneof`_ ``_total_stored_bytes``. + config_state (google.cloud.backupdr_v1.types.BackupConfigState): + Output only. The backup configuration state. + backup_config_info (google.cloud.backupdr_v1.types.BackupConfigInfo): + Output only. Details of how the resource is + configured for backup. + data_source_gcp_resource (google.cloud.backupdr_v1.types.DataSourceGcpResource): + The backed up resource is a Google Cloud + resource. The word 'DataSource' was included in + the names to indicate that this is the + representation of the Google Cloud resource used + within the DataSource object. + + This field is a member of `oneof`_ ``source_resource``. + data_source_backup_appliance_application (google.cloud.backupdr_v1.types.DataSourceBackupApplianceApplication): + The backed up resource is a backup appliance + application. + + This field is a member of `oneof`_ ``source_resource``. + """ + + class State(proto.Enum): + r"""Holds the state of the data source resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The data source is being created. + ACTIVE (2): + The data source has been created and is fully + usable. + DELETING (3): + The data source is being deleted. + ERROR (4): + The data source is experiencing an issue and + might be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=21, + enum=State, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=23, + optional=True, + ) + config_state: "BackupConfigState" = proto.Field( + proto.ENUM, + number=24, + enum="BackupConfigState", + ) + backup_config_info: "BackupConfigInfo" = proto.Field( + proto.MESSAGE, + number=25, + message="BackupConfigInfo", + ) + data_source_gcp_resource: "DataSourceGcpResource" = proto.Field( + proto.MESSAGE, + number=26, + oneof="source_resource", + message="DataSourceGcpResource", + ) + data_source_backup_appliance_application: "DataSourceBackupApplianceApplication" = ( + proto.Field( + proto.MESSAGE, + number=27, + oneof="source_resource", + message="DataSourceBackupApplianceApplication", + ) + ) + + +class BackupConfigInfo(proto.Message): + r"""BackupConfigInfo has information about how the resource is + configured for Backup and about the most recent backup to this + vault. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + last_backup_state (google.cloud.backupdr_v1.types.BackupConfigInfo.LastBackupState): + Output only. 
The status of the last backup to + this BackupVault + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If the last backup were + successful, this field has the consistency date. + last_backup_error (google.rpc.status_pb2.Status): + Output only. If the last backup failed, this + field has the error message. + gcp_backup_config (google.cloud.backupdr_v1.types.GcpBackupConfig): + Configuration for a Google Cloud resource. + + This field is a member of `oneof`_ ``backup_config``. + backup_appliance_backup_config (google.cloud.backupdr_v1.types.BackupApplianceBackupConfig): + Configuration for an application backed up by + a Backup Appliance. + + This field is a member of `oneof`_ ``backup_config``. + """ + + class LastBackupState(proto.Enum): + r"""LastBackupstate tracks whether the last backup was not yet + started, successful, failed, or could not be run because of the + lack of permissions. + + Values: + LAST_BACKUP_STATE_UNSPECIFIED (0): + Status not set. + FIRST_BACKUP_PENDING (1): + The first backup has not yet completed + SUCCEEDED (2): + The most recent backup was successful + FAILED (3): + The most recent backup failed + PERMISSION_DENIED (4): + The most recent backup could not be + run/failed because of the lack of permissions + """ + LAST_BACKUP_STATE_UNSPECIFIED = 0 + FIRST_BACKUP_PENDING = 1 + SUCCEEDED = 2 + FAILED = 3 + PERMISSION_DENIED = 4 + + last_backup_state: LastBackupState = proto.Field( + proto.ENUM, + number=1, + enum=LastBackupState, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + last_backup_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + gcp_backup_config: "GcpBackupConfig" = proto.Field( + proto.MESSAGE, + number=4, + oneof="backup_config", + message="GcpBackupConfig", + ) + backup_appliance_backup_config: "BackupApplianceBackupConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_config", + message="BackupApplianceBackupConfig", + ) + + +class GcpBackupConfig(proto.Message): + r"""GcpBackupConfig captures the Backup configuration details for + Google Cloud resources. All Google Cloud resources regardless of + type are protected with backup plan associations. + + Attributes: + backup_plan (str): + The name of the backup plan. + backup_plan_description (str): + The description of the backup plan. + backup_plan_association (str): + The name of the backup plan association. + backup_plan_rules (MutableSequence[str]): + The names of the backup plan rules which + point to this backupvault + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_description: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan_association: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan_rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class BackupApplianceBackupConfig(proto.Message): + r"""BackupApplianceBackupConfig captures the backup configuration + for applications that are protected by Backup Appliances. + + Attributes: + backup_appliance_name (str): + The name of the backup appliance. + backup_appliance_id (int): + The ID of the backup appliance. + sla_id (int): + The ID of the SLA of this application. + application_name (str): + The name of the application. + host_name (str): + The name of the host where the application is + running. 
+ slt_name (str): + The name of the SLT associated with the + application. + slp_name (str): + The name of the SLP associated with the + application. + """ + + backup_appliance_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance_id: int = proto.Field( + proto.INT64, + number=2, + ) + sla_id: int = proto.Field( + proto.INT64, + number=3, + ) + application_name: str = proto.Field( + proto.STRING, + number=4, + ) + host_name: str = proto.Field( + proto.STRING, + number=5, + ) + slt_name: str = proto.Field( + proto.STRING, + number=6, + ) + slp_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DataSourceGcpResource(proto.Message): + r"""DataSourceGcpResource is used for protected resources that + are Google Cloud Resources. This name is easeier to understand + than GcpResourceDataSource or GcpDataSourceResource + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resourcename (str): + Output only. Full resource pathname URL of + the source Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + The type of the Google Cloud resource. Use + the Unified Resource Type, eg. + compute.googleapis.com/Instance. + compute_instance_datasource_properties (google.cloud.backupdr_v1.types.ComputeInstanceDataSourceProperties): + ComputeInstanceDataSourceProperties has a + subset of Compute Instance properties that are + useful at the Datasource level. + + This field is a member of `oneof`_ ``gcp_resource_properties``. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + compute_instance_datasource_properties: backupvault_gce.ComputeInstanceDataSourceProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="gcp_resource_properties", + message=backupvault_gce.ComputeInstanceDataSourceProperties, + ) + + +class DataSourceBackupApplianceApplication(proto.Message): + r"""BackupApplianceApplication describes a Source Resource when + it is an application backed up by a BackupAppliance. + + Attributes: + application_name (str): + The name of the Application as known to the + Backup Appliance. + backup_appliance (str): + Appliance name. + appliance_id (int): + Appliance Id of the Backup Appliance. + type_ (str): + The type of the application. e.g. VMBackup + application_id (int): + The appid field of the application within the + Backup Appliance. + hostname (str): + Hostname of the host where the application is + running. + host_id (int): + Hostid of the application host. + """ + + application_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance: str = proto.Field( + proto.STRING, + number=2, + ) + appliance_id: int = proto.Field( + proto.INT64, + number=3, + ) + type_: str = proto.Field( + proto.STRING, + number=4, + ) + application_id: int = proto.Field( + proto.INT64, + number=8, + ) + hostname: str = proto.Field( + proto.STRING, + number=6, + ) + host_id: int = proto.Field( + proto.INT64, + number=7, + ) + + +class ServiceLockInfo(proto.Message): + r"""ServiceLockInfo represents the details of a lock taken by the + service on a Backup resource. + + Attributes: + operation (str): + Output only. The name of the operation that + created this lock. The lock will automatically + be released when the operation completes. 
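# A minimal sketch of building a DataSource that wraps a Google Cloud resource,
# assuming the messages in this module are re-exported from google.cloud.backupdr_v1
# as the docstrings above indicate; the resource names below are placeholders.
from google.cloud import backupdr_v1

data_source = backupdr_v1.DataSource(
    name=(
        "projects/my-project/locations/us-central1/backupVaults/my-vault"
        "/dataSources/my-ds"
    ),
    data_source_gcp_resource=backupdr_v1.DataSourceGcpResource(
        gcp_resourcename=(
            "//compute.googleapis.com/projects/my-project/zones/us-central1-a"
            "/instances/my-vm"
        ),
        location="us-central1-a",
        type_="compute.googleapis.com/Instance",
    ),
)

# Setting data_source_backup_appliance_application instead would clear the field
# above, because both members belong to the `source_resource` oneof.
print(data_source._pb.WhichOneof("source_resource"))  # -> "data_source_gcp_resource"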
+ """ + + operation: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BackupApplianceLockInfo(proto.Message): + r"""BackupApplianceLockInfo contains metadata about the + backupappliance that created the lock. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_appliance_id (int): + Required. The ID of the backup/recovery + appliance that created this lock. + backup_appliance_name (str): + Required. The name of the backup/recovery + appliance that created this lock. + lock_reason (str): + Required. The reason for the lock: e.g. + MOUNT/RESTORE/BACKUP/etc. The value of this + string is only meaningful to the client and it + is not interpreted by the BackupVault service. + job_name (str): + The job name on the backup/recovery appliance + that created this lock. + + This field is a member of `oneof`_ ``lock_source``. + backup_image (str): + The image name that depends on this Backup. + + This field is a member of `oneof`_ ``lock_source``. + sla_id (int): + The SLA on the backup/recovery appliance that + owns the lock. + + This field is a member of `oneof`_ ``lock_source``. + """ + + backup_appliance_id: int = proto.Field( + proto.INT64, + number=1, + ) + backup_appliance_name: str = proto.Field( + proto.STRING, + number=2, + ) + lock_reason: str = proto.Field( + proto.STRING, + number=5, + ) + job_name: str = proto.Field( + proto.STRING, + number=6, + oneof="lock_source", + ) + backup_image: str = proto.Field( + proto.STRING, + number=7, + oneof="lock_source", + ) + sla_id: int = proto.Field( + proto.INT64, + number=8, + oneof="lock_source", + ) + + +class BackupLock(proto.Message): + r"""BackupLock represents a single lock on a Backup resource. An + unexpired lock on a Backup prevents the Backup from being + deleted. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lock_until_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time after which this lock is + not considered valid and will no longer protect + the Backup from deletion. + backup_appliance_lock_info (google.cloud.backupdr_v1.types.BackupApplianceLockInfo): + If the client is a backup and recovery + appliance, this contains metadata about why the + lock exists. + + This field is a member of `oneof`_ ``ClientLockInfo``. + service_lock_info (google.cloud.backupdr_v1.types.ServiceLockInfo): + Output only. Contains metadata about the lock + exist for Google Cloud native backups. + + This field is a member of `oneof`_ ``ClientLockInfo``. 
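# A short sketch of the two mutually exclusive lock sources on a BackupLock
# (the `ClientLockInfo` oneof), assuming these types are re-exported from
# google.cloud.backupdr_v1; the appliance values are placeholders.
from google.protobuf import timestamp_pb2

from google.cloud import backupdr_v1

lock = backupdr_v1.BackupLock(
    lock_until_time=timestamp_pb2.Timestamp(seconds=1735689600),  # 2025-01-01T00:00:00Z
    backup_appliance_lock_info=backupdr_v1.BackupApplianceLockInfo(
        backup_appliance_id=12345,
        backup_appliance_name="my-appliance",
        lock_reason="MOUNT",
        job_name="job-42",
    ),
)

# Assigning service_lock_info afterwards would clear backup_appliance_lock_info,
# since both are members of the `ClientLockInfo` oneof.
print(lock._pb.WhichOneof("ClientLockInfo"))  # -> "backup_appliance_lock_info"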
+ """ + + lock_until_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + backup_appliance_lock_info: "BackupApplianceLockInfo" = proto.Field( + proto.MESSAGE, + number=3, + oneof="ClientLockInfo", + message="BackupApplianceLockInfo", + ) + service_lock_info: "ServiceLockInfo" = proto.Field( + proto.MESSAGE, + number=4, + oneof="ClientLockInfo", + message="ServiceLockInfo", + ) + + +class Backup(proto.Message): + r"""Message describing a Backup object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup to create. It + must have the + format\ ``"projects//locations//backupVaults//dataSources/{datasource}/backups/{backup}"``. + ``{backup}`` cannot be changed after creation. It must be + between 3-63 characters long and must be unique within the + datasource. + description (str): + Output only. The description of the Backup + instance (2048 characters or less). + + This field is a member of `oneof`_ ``_description``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined. + enforced_retention_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The backup can not be deleted + before this time. + + This field is a member of `oneof`_ ``_enforced_retention_end_time``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. When this backup is automatically + expired. + + This field is a member of `oneof`_ ``_expire_time``. + consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The point in time when this + backup was captured from the source. + + This field is a member of `oneof`_ ``_consistency_time``. + etag (str): + Optional. Server specified ETag to prevent + updates from overwriting each other. + + This field is a member of `oneof`_ ``_etag``. + state (google.cloud.backupdr_v1.types.Backup.State): + Output only. The Backup resource instance + state. + service_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Output only. The list of BackupLocks taken by + the service to prevent the deletion of the + backup. + backup_appliance_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Optional. The list of BackupLocks taken by + the accessor Backup Appliance. + compute_instance_backup_properties (google.cloud.backupdr_v1.types.ComputeInstanceBackupProperties): + Output only. Compute Engine specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_appliance_backup_properties (google.cloud.backupdr_v1.types.BackupApplianceBackupProperties): + Output only. Backup Appliance specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_type (google.cloud.backupdr_v1.types.Backup.BackupType): + Output only. 
Type of the backup, unspecified, + scheduled or ondemand. + gcp_backup_plan_info (google.cloud.backupdr_v1.types.Backup.GCPBackupPlanInfo): + Output only. Configuration for a Google Cloud + resource. + + This field is a member of `oneof`_ ``plan_info``. + resource_size_bytes (int): + Output only. source resource size in bytes at + the time of the backup. + """ + + class State(proto.Enum): + r"""Holds the state of the backup resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The backup is being created. + ACTIVE (2): + The backup has been created and is fully + usable. + DELETING (3): + The backup is being deleted. + ERROR (4): + The backup is experiencing an issue and might + be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + class BackupType(proto.Enum): + r"""Type of the backup, scheduled or ondemand. + + Values: + BACKUP_TYPE_UNSPECIFIED (0): + Backup type is unspecified. + SCHEDULED (1): + Scheduled backup. + ON_DEMAND (2): + On demand backup. + """ + BACKUP_TYPE_UNSPECIFIED = 0 + SCHEDULED = 1 + ON_DEMAND = 2 + + class GCPBackupPlanInfo(proto.Message): + r"""GCPBackupPlanInfo captures the plan configuration details of + Google Cloud resources at the time of backup. + + Attributes: + backup_plan (str): + Resource name of backup plan by which + workload is protected at the time of the backup. + Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + backup_plan_rule_id (str): + The rule id of the backup plan which + triggered this backup in case of scheduled + backup or used for + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + enforced_retention_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=timestamp_pb2.Timestamp, + ) + consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=15, + enum=State, + ) + service_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="BackupLock", + ) + backup_appliance_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=18, + message="BackupLock", + ) + compute_instance_backup_properties: backupvault_gce.ComputeInstanceBackupProperties = proto.Field( + proto.MESSAGE, + number=19, + oneof="backup_properties", + message=backupvault_gce.ComputeInstanceBackupProperties, + ) + backup_appliance_backup_properties: backupvault_ba.BackupApplianceBackupProperties = proto.Field( + proto.MESSAGE, + number=21, + oneof="backup_properties", + 
message=backupvault_ba.BackupApplianceBackupProperties, + ) + backup_type: BackupType = proto.Field( + proto.ENUM, + number=20, + enum=BackupType, + ) + gcp_backup_plan_info: GCPBackupPlanInfo = proto.Field( + proto.MESSAGE, + number=22, + oneof="plan_info", + message=GCPBackupPlanInfo, + ) + resource_size_bytes: int = proto.Field( + proto.INT64, + number=23, + ) + + +class CreateBackupVaultRequest(proto.Message): + r"""Message for creating a BackupVault. + + Attributes: + parent (str): + Required. Value for parent. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating ID + server-side, remove this field and backup_vault_id from the + method_signature of Create RPC + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_vault_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ListBackupVaultsRequest(proto.Message): + r"""Request message for listing backupvault stores. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup Vault. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupVaultView", + ) + + +class ListBackupVaultsResponse(proto.Message): + r"""Response message for listing BackupVaults. + + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class FetchUsableBackupVaultsRequest(proto.Message): + r"""Request message for fetching usable BackupVaults. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class FetchUsableBackupVaultsResponse(proto.Message): + r"""Response message for fetching usable BackupVaults. + + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
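# A paging sketch for ListBackupVaultsRequest/Response. The generated service
# client is not part of this file, so the BackupDRClient name and its
# list_backup_vaults method are assumed here purely for illustration.
from google.cloud import backupdr_v1


def list_all_vault_names(project_id: str) -> list:
    client = backupdr_v1.BackupDRClient()  # assumed GAPIC client class name
    request = backupdr_v1.ListBackupVaultsRequest(
        # "-" requests vaults across every location; unreachable regions are
        # reported in the response's `unreachable` field instead of failing the call.
        parent=f"projects/{project_id}/locations/-",
        page_size=50,
    )
    # The returned pager follows next_page_token across pages automatically.
    return [vault.name for vault in client.list_backup_vaults(request=request)]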
+ """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupVaultRequest(proto.Message): + r"""Request message for getting a BackupVault. + + Attributes: + name (str): + Required. Name of the backupvault store resource name, in + the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup Vault + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupVaultView", + ) + + +class UpdateBackupVaultRequest(proto.Message): + r"""Request message for updating a BackupVault. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then the request will fail. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + force (bool): + Optional. If set to true, will not check plan + duration against backup vault enforcement + duration. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=2, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteBackupVaultRequest(proto.Message): + r"""Message for deleting a BackupVault. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. 
+ + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, any data source + from this backup vault will also be deleted. + etag (str): + The current etag of the backup vault. + If an etag is provided and does not match the + current etag of the connection, deletion will be + blocked. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + allow_missing (bool): + Optional. If true and the BackupVault is not + found, the request will succeed but no action + will be taken. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListDataSourcesRequest(proto.Message): + r"""Request message for listing DataSources. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataSourcesResponse(proto.Message): + r"""Response message for listing DataSources. + + Attributes: + data_sources (MutableSequence[google.cloud.backupdr_v1.types.DataSource]): + The list of DataSource instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return data + sources in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
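# A sketch of a guarded vault deletion, assuming the types are re-exported from
# google.cloud.backupdr_v1; the vault name and etag are placeholders.
import uuid

from google.cloud import backupdr_v1

delete_request = backupdr_v1.DeleteBackupVaultRequest(
    name="projects/my-project/locations/us-central1/backupVaults/my-vault",
    # Deletion is blocked if this etag no longer matches the vault's current etag.
    etag="abc123",
    # force=True would also delete any data sources still held in the vault.
    force=False,
    # Reuse the same UUID on retries so the server can ignore duplicates.
    request_id=str(uuid.uuid4()),
    validate_only=True,  # dry run: validate the request without mutating anything
)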
+ """ + + @property + def raw_page(self): + return self + + data_sources: MutableSequence["DataSource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSource", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetDataSourceRequest(proto.Message): + r"""Request message for getting a DataSource instance. + + Attributes: + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDataSourceRequest(proto.Message): + r"""Request message for updating a data source instance. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then the request will fail. + data_source (google.cloud.backupdr_v1.types.DataSource): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. Enable upsert. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_source: "DataSource" = proto.Field( + proto.MESSAGE, + number=2, + message="DataSource", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListBackupsRequest(proto.Message): + r"""Request message for listing Backups. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. 
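# A sketch of a partial DataSource update driven by a FieldMask, assuming the
# types are re-exported from google.cloud.backupdr_v1; names are placeholders.
from google.protobuf import field_mask_pb2

from google.cloud import backupdr_v1

update_request = backupdr_v1.UpdateDataSourceRequest(
    data_source=backupdr_v1.DataSource(
        name=(
            "projects/my-project/locations/us-central1/backupVaults/my-vault"
            "/dataSources/my-ds"
        ),
        labels={"team": "storage"},
    ),
    # Without a mask the request fails; with it, only `labels` is overwritten.
    update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    allow_missing=False,  # True turns the update into an upsert
)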
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupView", + ) + + +class ListBackupsResponse(proto.Message): + r"""Response message for listing Backups. + + Attributes: + backups (MutableSequence[google.cloud.backupdr_v1.types.Backup]): + The list of Backup instances in the project + for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return data + sources in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence["Backup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Backup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Request message for getting a Backup. + + Attributes: + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupView", + ) + + +class UpdateBackupRequest(proto.Message): + r"""Request message for updating a Backup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. If the user does not provide a mask then the + request will fail. + backup (google.cloud.backupdr_v1.types.Backup): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
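# A sketch of the request-ID idempotency pattern described above, applied to
# UpdateBackupRequest; types are assumed to be re-exported from
# google.cloud.backupdr_v1 and the expire time is a placeholder.
import uuid

from google.protobuf import field_mask_pb2, timestamp_pb2

from google.cloud import backupdr_v1

# Generate the ID once and reuse it for every retry of this logical update, so
# the server can recognize and ignore duplicates for at least 60 minutes.
request_id = str(uuid.uuid4())

update_backup = backupdr_v1.UpdateBackupRequest(
    backup=backupdr_v1.Backup(
        name=(
            "projects/my-project/locations/us-central1/backupVaults/my-vault"
            "/dataSources/my-ds/backups/my-backup"
        ),
        expire_time=timestamp_pb2.Timestamp(seconds=1767225600),  # 2026-01-01T00:00:00Z
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
    request_id=request_id,
)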
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: "Backup" = proto.Field( + proto.MESSAGE, + number=2, + message="Backup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Message for deleting a Backup. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreBackupRequest(proto.Message): + r"""Request message for restoring from a Backup. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the Backup instance, in the + format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + compute_instance_target_environment (google.cloud.backupdr_v1.types.ComputeInstanceTargetEnvironment): + Compute Engine target environment to be used + during restore. + + This field is a member of `oneof`_ ``target_environment``. + compute_instance_restore_properties (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties): + Compute Engine instance properties to be + overridden during restore. + + This field is a member of `oneof`_ ``instance_properties``. 
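# A sketch of a restore request targeting Compute Engine, assuming these
# messages are re-exported from google.cloud.backupdr_v1; project, zone and
# machine type values are placeholders.
from google.cloud import backupdr_v1

restore_request = backupdr_v1.RestoreBackupRequest(
    name=(
        "projects/my-project/locations/us-central1/backupVaults/my-vault"
        "/dataSources/my-ds/backups/my-backup"
    ),
    compute_instance_target_environment=backupdr_v1.ComputeInstanceTargetEnvironment(
        project="my-restore-project",
        zone="us-central1-a",
    ),
    compute_instance_restore_properties=backupdr_v1.ComputeInstanceRestoreProperties(
        name="restored-vm",
        machine_type="zones/us-central1-a/machineTypes/e2-standard-4",
    ),
)
# `target_environment` and `instance_properties` are separate oneofs, so both
# fields above may be set on the same request.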
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + compute_instance_target_environment: backupvault_gce.ComputeInstanceTargetEnvironment = proto.Field( + proto.MESSAGE, + number=3, + oneof="target_environment", + message=backupvault_gce.ComputeInstanceTargetEnvironment, + ) + compute_instance_restore_properties: backupvault_gce.ComputeInstanceRestoreProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="instance_properties", + message=backupvault_gce.ComputeInstanceRestoreProperties, + ) + + +class RestoreBackupResponse(proto.Message): + r"""Response message for restoring from a Backup. + + Attributes: + target_resource (google.cloud.backupdr_v1.types.TargetResource): + Details of the target resource + created/modified as part of restore. + """ + + target_resource: "TargetResource" = proto.Field( + proto.MESSAGE, + number=1, + message="TargetResource", + ) + + +class TargetResource(proto.Message): + r"""Details of the target resource created/modified as part of + restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resource (google.cloud.backupdr_v1.types.GcpResource): + Details of the native Google Cloud resource + created as part of restore. + + This field is a member of `oneof`_ ``target_resource_info``. + """ + + gcp_resource: "GcpResource" = proto.Field( + proto.MESSAGE, + number=1, + oneof="target_resource_info", + message="GcpResource", + ) + + +class GcpResource(proto.Message): + r"""Minimum details to identify a Google Cloud resource + + Attributes: + gcp_resourcename (str): + Name of the Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + Type of the resource. Use the Unified + Resource Type, eg. + compute.googleapis.com/Instance. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py new file mode 100644 index 000000000000..131f54b56abe --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupApplianceBackupProperties", + }, +) + + +class BackupApplianceBackupProperties(proto.Message): + r"""BackupApplianceBackupProperties represents BackupDR backup + appliance's properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + generation_id (int): + Output only. The numeric generation ID of the + backup (monotonically increasing). + + This field is a member of `oneof`_ ``_generation_id``. + finalize_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this backup object + was finalized (if none, backup is not + finalized). + + This field is a member of `oneof`_ ``_finalize_time``. + recovery_range_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The earliest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_start_time``. + recovery_range_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The latest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_end_time``. + """ + + generation_id: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + finalize_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py new file mode 100644 index 000000000000..9e3e98632644 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py @@ -0,0 +1,1991 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
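# A sketch of the appliance backup properties defined above, assuming they are
# re-exported from google.cloud.backupdr_v1; the timestamps are placeholders.
from google.protobuf import timestamp_pb2

from google.cloud import backupdr_v1

props = backupdr_v1.BackupApplianceBackupProperties(
    generation_id=7,
    recovery_range_start_time=timestamp_pb2.Timestamp(seconds=1720000000),
    recovery_range_end_time=timestamp_pb2.Timestamp(seconds=1720086400),
)
# All four fields use explicit presence (`optional=True`), so an unset field can
# be distinguished from its zero value on the underlying protobuf message.
print(props._pb.HasField("finalize_time"))  # -> False: this backup is not finalized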
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "KeyRevocationActionType", + "ComputeInstanceBackupProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ComputeInstanceDataSourceProperties", + "AdvancedMachineFeatures", + "ConfidentialInstanceConfig", + "DisplayDevice", + "AcceleratorConfig", + "CustomerEncryptionKey", + "Entry", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "AccessConfig", + "AliasIpRange", + "InstanceParams", + "AllocationAffinity", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "AttachedDisk", + "GuestOsFeature", + }, +) + + +class KeyRevocationActionType(proto.Enum): + r"""Specifies whether the virtual machine instance will be shut + down on key revocation. It is currently used in instance, + instance properties and GMI protos + + Values: + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NONE (1): + Indicates user chose no operation. + STOP (2): + Indicates user chose to opt for VM shutdown + on key revocation. + """ + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 0 + NONE = 1 + STOP = 2 + + +class ComputeInstanceBackupProperties(proto.Message): + r"""ComputeInstanceBackupProperties represents Compute Engine + instance backup properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + An optional text description for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_description``. + tags (google.cloud.backupdr_v1.types.Tags): + A list of tags to apply to the instances that + are created from these properties. The tags + identify valid sources or targets for network + firewalls. The setTags method can modify this + list of tags. Each tag within the list must + comply with RFC1035 + (https://www.ietf.org/rfc/rfc1035.txt). + + This field is a member of `oneof`_ ``_tags``. + machine_type (str): + The machine type to use for instances that + are created from these properties. + + This field is a member of `oneof`_ ``_machine_type``. + can_ip_forward (bool): + Enables instances created based on these properties to send + packets with source IP addresses other than their own and + receive packets with destination IP addresses other than + their own. If these instances will be used as an IP gateway + or it will be set as the next-hop in a Route resource, + specify ``true``. If unsure, leave this set to ``false``. + See the + https://cloud.google.com/vpc/docs/using-routes#canipforward + documentation for more information. + + This field is a member of `oneof`_ ``_can_ip_forward``. + network_interface (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + An array of network access configurations for + this interface. + disk (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + An array of disks that are associated with + the instances that are created from these + properties. + metadata (google.cloud.backupdr_v1.types.Metadata): + The metadata key/value pairs to assign to + instances that are created from these + properties. These pairs can consist of custom + metadata or predefined keys. See + https://cloud.google.com/compute/docs/metadata/overview + for more information. + + This field is a member of `oneof`_ ``_metadata``. 
+ service_account (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + A list of service accounts with specified + scopes. Access tokens for these service accounts + are available to the instances that are created + from these properties. Use metadata queries to + obtain the access tokens for these instances. + scheduling (google.cloud.backupdr_v1.types.Scheduling): + Specifies the scheduling options for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_scheduling``. + guest_accelerator (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + A list of guest accelerator cards' type and + count to use for instances created from these + properties. + min_cpu_platform (str): + Minimum cpu/platform to be used by instances. The instance + may be scheduled on the specified or newer cpu/platform. + Applicable values are the friendly names of CPU platforms, + such as ``minCpuPlatform: Intel Haswell`` or + ``minCpuPlatform: Intel Sandy Bridge``. For more + information, read + https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + source_instance (str): + The source instance used to create this + backup. This can be a partial or full URL to the + resource. For example, the following are valid + values: + + -https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance + -projects/project/zones/zone/instances/instance + + This field is a member of `oneof`_ ``_source_instance``. + labels (MutableMapping[str, str]): + Labels to apply to instances that are created + from these properties. 
+ """ + + description: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Tags", + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + network_interface: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="NetworkInterface", + ) + disk: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="AttachedDisk", + ) + metadata: "Metadata" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Metadata", + ) + service_account: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="ServiceAccount", + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="Scheduling", + ) + guest_accelerator: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="AcceleratorConfig", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + source_instance: str = proto.Field( + proto.STRING, + number=13, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) + + +class ComputeInstanceRestoreProperties(proto.Message): + r"""ComputeInstanceRestoreProperties represents Compute Engine + instance properties to be overridden during restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Name of the compute instance. + + This field is a member of `oneof`_ ``_name``. + advanced_machine_features (google.cloud.backupdr_v1.types.AdvancedMachineFeatures): + Optional. Controls for advanced + machine-related behavior features. + + This field is a member of `oneof`_ ``_advanced_machine_features``. + can_ip_forward (bool): + Optional. Allows this instance to send and + receive packets with non-matching destination or + source IPs. + + This field is a member of `oneof`_ ``_can_ip_forward``. + confidential_instance_config (google.cloud.backupdr_v1.types.ConfidentialInstanceConfig): + Optional. Controls Confidential compute + options on the instance + + This field is a member of `oneof`_ ``_confidential_instance_config``. + deletion_protection (bool): + Optional. Whether the resource should be + protected against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. + description (str): + Optional. An optional description of this + resource. Provide this property when you create + the resource. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + Optional. Array of disks associated with this + instance. Persistent disks must be created + before you can assign them. + display_device (google.cloud.backupdr_v1.types.DisplayDevice): + Optional. Enables display device for the + instance. + + This field is a member of `oneof`_ ``_display_device``. + guest_accelerators (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + Optional. A list of the type and count of + accelerator cards attached to the instance. 
+ hostname (str): + Optional. Specifies the hostname of the instance. The + specified hostname must be RFC1035 compliant. If hostname is + not specified, the default hostname is + [INSTANCE_NAME].c.[PROJECT_ID].internal when using the + global DNS, and + [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using + zonal DNS. + + This field is a member of `oneof`_ ``_hostname``. + instance_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts suspended data for an + instance with a customer-managed encryption key. + + This field is a member of `oneof`_ ``_instance_encryption_key``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + Optional. KeyRevocationActionType of the + instance. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + labels (MutableMapping[str, str]): + Optional. Labels to apply to this instance. + machine_type (str): + Optional. Full or partial URL of the machine + type resource to use for this instance. + + This field is a member of `oneof`_ ``_machine_type``. + metadata (google.cloud.backupdr_v1.types.Metadata): + Optional. This includes custom metadata and + predefined keys. + + This field is a member of `oneof`_ ``_metadata``. + min_cpu_platform (str): + Optional. Minimum CPU platform to use for + this instance. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + network_interfaces (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + Optional. An array of network configurations + for this instance. These specify how interfaces + are configured to interact with other network + services, such as connecting to the internet. + Multiple interfaces are supported per instance. + network_performance_config (google.cloud.backupdr_v1.types.NetworkPerformanceConfig): + Optional. Configure network performance such + as egress bandwidth tier. + + This field is a member of `oneof`_ ``_network_performance_config``. + params (google.cloud.backupdr_v1.types.InstanceParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. + private_ipv6_google_access (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties.InstancePrivateIpv6GoogleAccess): + Optional. The private IPv6 google access type for the VM. If + not specified, use INHERIT_FROM_SUBNETWORK as default. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + allocation_affinity (google.cloud.backupdr_v1.types.AllocationAffinity): + Optional. Specifies the reservations that + this instance can consume from. + + This field is a member of `oneof`_ ``_allocation_affinity``. + resource_policies (MutableSequence[str]): + Optional. Resource policies applied to this + instance. + scheduling (google.cloud.backupdr_v1.types.Scheduling): + Optional. Sets the scheduling options for + this instance. + + This field is a member of `oneof`_ ``_scheduling``. + service_accounts (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + Optional. A list of service accounts, with + their specified scopes, authorized for this + instance. Only one service account per VM + instance is supported. + tags (google.cloud.backupdr_v1.types.Tags): + Optional. Tags to apply to this instance. + Tags are used to identify valid sources or + targets for network firewalls and are specified + by the client during instance creation. + + This field is a member of `oneof`_ ``_tags``. 
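# A sketch of overriding a few instance properties at restore time, using only
# fields documented above and assuming re-export from google.cloud.backupdr_v1;
# all values are placeholders.
from google.cloud import backupdr_v1

restore_props = backupdr_v1.ComputeInstanceRestoreProperties(
    name="restored-vm",
    machine_type="zones/us-central1-a/machineTypes/e2-standard-4",
    hostname="restored-vm.example.internal",  # must be RFC1035 compliant
    min_cpu_platform="Intel Cascade Lake",
    labels={"restored-from": "my-backup"},
    deletion_protection=True,
)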
+ """ + + class InstancePrivateIpv6GoogleAccess(proto.Enum): + r"""The private IPv6 google access type for the VMs. + + Values: + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): + Default value. This value is unused. + INHERIT_FROM_SUBNETWORK (1): + Each network interface inherits + PrivateIpv6GoogleAccess from its subnetwork. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (2): + Outbound private IPv6 access from VMs in this + subnet to Google services. If specified, the + subnetwork who is attached to the instance's + default network interface will be assigned an + internal IPv6 prefix if it doesn't have before. + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (3): + Bidirectional private IPv6 access to/from + Google services. If specified, the subnetwork + who is attached to the instance's default + network interface will be assigned an internal + IPv6 prefix if it doesn't have before. + """ + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + INHERIT_FROM_SUBNETWORK = 1 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 2 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + advanced_machine_features: "AdvancedMachineFeatures" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="AdvancedMachineFeatures", + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + confidential_instance_config: "ConfidentialInstanceConfig" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="ConfidentialInstanceConfig", + ) + deletion_protection: bool = proto.Field( + proto.BOOL, + number=5, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + disks: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AttachedDisk", + ) + display_device: "DisplayDevice" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="DisplayDevice", + ) + guest_accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AcceleratorConfig", + ) + hostname: str = proto.Field( + proto.STRING, + number=10, + optional=True, + ) + instance_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=11, + optional=True, + message="CustomerEncryptionKey", + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + machine_type: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + metadata: "Metadata" = proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="Metadata", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=16, + optional=True, + ) + network_interfaces: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="NetworkInterface", + ) + network_performance_config: "NetworkPerformanceConfig" = proto.Field( + proto.MESSAGE, + number=18, + optional=True, + message="NetworkPerformanceConfig", + ) + params: "InstanceParams" = proto.Field( + proto.MESSAGE, + number=19, + optional=True, + message="InstanceParams", + ) + private_ipv6_google_access: InstancePrivateIpv6GoogleAccess = proto.Field( + proto.ENUM, + number=20, + optional=True, + enum=InstancePrivateIpv6GoogleAccess, + ) + allocation_affinity: "AllocationAffinity" = proto.Field( + 
proto.MESSAGE, + number=21, + optional=True, + message="AllocationAffinity", + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22, + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=23, + optional=True, + message="Scheduling", + ) + service_accounts: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="ServiceAccount", + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=26, + optional=True, + message="Tags", + ) + + +class ComputeInstanceTargetEnvironment(proto.Message): + r"""ComputeInstanceTargetEnvironment represents Compute Engine + target environment to be used during restore. + + Attributes: + project (str): + Required. Target project for the Compute + Engine instance. + zone (str): + Required. The zone of the Compute Engine + instance. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ComputeInstanceDataSourceProperties(proto.Message): + r"""ComputeInstanceDataSourceProperties represents the properties + of a ComputeEngine resource that are stored in the DataSource. + + Attributes: + name (str): + Name of the compute instance backed up by the + datasource. + description (str): + The description of the Compute Engine + instance. + machine_type (str): + The machine type of the instance. + total_disk_count (int): + The total number of disks attached to the + Instance. + total_disk_size_gb (int): + The sum of all the disk sizes. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + ) + total_disk_count: int = proto.Field( + proto.INT64, + number=4, + ) + total_disk_size_gb: int = proto.Field( + proto.INT64, + number=5, + ) + + +class AdvancedMachineFeatures(proto.Message): + r"""Specifies options for controlling advanced machine features. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_nested_virtualization (bool): + Optional. Whether to enable nested + virtualization or not (default is false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. + threads_per_core (int): + Optional. The number of threads per physical + core. To disable simultaneous multithreading + (SMT) set this to 1. If unset, the maximum + number of threads supported per core by the + underlying processor is assumed. + + This field is a member of `oneof`_ ``_threads_per_core``. + visible_core_count (int): + Optional. The number of physical cores to + expose to an instance. Multiply by the number of + threads per core to compute the total number of + virtual CPUs to expose to the instance. If + unset, the number of cores is inferred from the + instance's nominal CPU count and the underlying + platform's SMT width. + + This field is a member of `oneof`_ ``_visible_core_count``. + enable_uefi_networking (bool): + Optional. Whether to enable UEFI networking + for instance creation. + + This field is a member of `oneof`_ ``_enable_uefi_networking``. 
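+
+    Example: a short sketch of constructing this message (it assumes the
+    type is re-exported as ``backupdr_v1.AdvancedMachineFeatures``; the
+    values are illustrative only)::
+
+        from google.cloud import backupdr_v1
+
+        # threads_per_core=1 disables simultaneous multithreading (SMT),
+        # as described for the field above.
+        features = backupdr_v1.AdvancedMachineFeatures(
+            enable_nested_virtualization=True,
+            threads_per_core=1,
+        )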
+ """ + + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + threads_per_core: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + visible_core_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + enable_uefi_networking: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + + +class ConfidentialInstanceConfig(proto.Message): + r"""A set of Confidential Instance options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_confidential_compute (bool): + Optional. Defines whether the instance should + have confidential compute enabled. + + This field is a member of `oneof`_ ``_enable_confidential_compute``. + """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class DisplayDevice(proto.Message): + r"""A set of Display Device options + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_display (bool): + Optional. Enables display for the Compute + Engine VM + + This field is a member of `oneof`_ ``_enable_display``. + """ + + enable_display: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class AcceleratorConfig(proto.Message): + r"""A specification of the type and number of accelerator cards + attached to the instance. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_type (str): + Optional. Full or partial URL of the + accelerator type resource to attach to this + instance. + + This field is a member of `oneof`_ ``_accelerator_type``. + accelerator_count (int): + Optional. The number of the guest accelerator + cards exposed to this instance. + + This field is a member of `oneof`_ ``_accelerator_count``. + """ + + accelerator_type: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + accelerator_count: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class CustomerEncryptionKey(proto.Message): + r"""A customer-supplied encryption key. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw_key (str): + Optional. Specifies a 256-bit + customer-supplied encryption key. + + This field is a member of `oneof`_ ``key``. + rsa_encrypted_key (str): + Optional. RSA-wrapped 2048-bit + customer-supplied encryption key to either + encrypt or decrypt this resource. + + This field is a member of `oneof`_ ``key``. + kms_key_name (str): + Optional. The name of the encryption key that + is stored in Google Cloud KMS. + + This field is a member of `oneof`_ ``key``. + kms_key_service_account (str): + Optional. The service account being used for + the encryption request for the given KMS key. If + absent, the Compute Engine default service + account is used. + + This field is a member of `oneof`_ ``_kms_key_service_account``. 
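+
+    Example: a sketch of the ``key`` oneof behavior described above, where
+    assigning one member clears the others (assumes the type is re-exported
+    as ``backupdr_v1.CustomerEncryptionKey``; the key name is a placeholder)::
+
+        from google.cloud import backupdr_v1
+
+        key = backupdr_v1.CustomerEncryptionKey(
+            kms_key_name="projects/p/locations/l/keyRings/r/cryptoKeys/k",
+        )
+        # Setting another member of the "key" oneof clears kms_key_name.
+        key.rsa_encrypted_key = "base64-wrapped-key"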
+    """
+
+    raw_key: str = proto.Field(
+        proto.STRING,
+        number=1,
+        oneof="key",
+    )
+    rsa_encrypted_key: str = proto.Field(
+        proto.STRING,
+        number=2,
+        oneof="key",
+    )
+    kms_key_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof="key",
+    )
+    kms_key_service_account: str = proto.Field(
+        proto.STRING,
+        number=4,
+        optional=True,
+    )
+
+
+class Entry(proto.Message):
+    r"""A key/value pair to be used for storing metadata.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        key (str):
+            Optional. Key for the metadata entry.
+
+            This field is a member of `oneof`_ ``_key``.
+        value (str):
+            Optional. Value for the metadata entry. These
+            are free-form strings, and only have meaning as
+            interpreted by the image running in the
+            instance. The only restriction placed on values
+            is that their size must be less than or equal to
+            262144 bytes (256 KiB).
+
+            This field is a member of `oneof`_ ``_value``.
+    """
+
+    key: str = proto.Field(
+        proto.STRING,
+        number=1,
+        optional=True,
+    )
+    value: str = proto.Field(
+        proto.STRING,
+        number=2,
+        optional=True,
+    )
+
+
+class Metadata(proto.Message):
+    r"""A collection of metadata key/value entries.
+
+    Attributes:
+        items (MutableSequence[google.cloud.backupdr_v1.types.Entry]):
+            Optional. Array of key/value pairs. The total
+            size of all keys and values must be less than
+            512 KB.
+    """
+
+    items: MutableSequence["Entry"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Entry",
+    )
+
+
+class NetworkInterface(proto.Message):
+    r"""A network interface resource attached to an instance.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        network (str):
+            Optional. URL of the VPC network resource for
+            this instance.
+
+            This field is a member of `oneof`_ ``_network``.
+        subnetwork (str):
+            Optional. The URL of the Subnetwork resource
+            for this instance.
+
+            This field is a member of `oneof`_ ``_subnetwork``.
+        ip_address (str):
+            Optional. An IPv4 internal IP address to
+            assign to the instance for this network
+            interface. If not specified by the user, an
+            unused internal IP is assigned by the system.
+
+            This field is a member of `oneof`_ ``_ip_address``.
+        ipv6_address (str):
+            Optional. An IPv6 internal network address
+            for this network interface. To use a static
+            internal IP address, it must be unused and in
+            the same region as the instance's zone. If not
+            specified, Google Cloud will automatically
+            assign an internal IPv6 address from the
+            instance's subnetwork.
+
+            This field is a member of `oneof`_ ``_ipv6_address``.
+        internal_ipv6_prefix_length (int):
+            Optional. The prefix length of the primary
+            internal IPv6 range.
+
+            This field is a member of `oneof`_ ``_internal_ipv6_prefix_length``.
+        name (str):
+            Output only. [Output Only] The name of the network
+            interface, which is generated by the server.
+
+            This field is a member of `oneof`_ ``_name``.
+        access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]):
+            Optional. An array of configurations for this interface.
+            Currently, only one access config, ONE_TO_ONE_NAT, is
+            supported. If there are no accessConfigs specified, then
+            this instance will have no external internet access.
+        ipv6_access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]):
+            Optional. An array of IPv6 access configurations for this
+            interface.
Currently, only one IPv6 access config, + DIRECT_IPV6, is supported. If there is no ipv6AccessConfig + specified, then this instance will have no external IPv6 + Internet access. + alias_ip_ranges (MutableSequence[google.cloud.backupdr_v1.types.AliasIpRange]): + Optional. An array of alias IP ranges for + this network interface. You can only specify + this field for network interfaces in VPC + networks. + stack_type (google.cloud.backupdr_v1.types.NetworkInterface.StackType): + The stack type for this network interface. + + This field is a member of `oneof`_ ``_stack_type``. + ipv6_access_type (google.cloud.backupdr_v1.types.NetworkInterface.Ipv6AccessType): + Optional. [Output Only] One of EXTERNAL, INTERNAL to + indicate whether the IP can be accessed from the Internet. + This field is always inherited from its subnetwork. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + queue_count (int): + Optional. The networking queue count that's + specified by users for the network interface. + Both Rx and Tx queues will be set to this + number. It'll be empty if not specified by the + users. + + This field is a member of `oneof`_ ``_queue_count``. + nic_type (google.cloud.backupdr_v1.types.NetworkInterface.NicType): + Optional. The type of vNIC to be used on this + interface. This may be gVNIC or VirtioNet. + + This field is a member of `oneof`_ ``_nic_type``. + network_attachment (str): + Optional. The URL of the network attachment that this + interface should connect to in the following format: + projects/{project_number}/regions/{region_name}/networkAttachments/{network_attachment_name}. + + This field is a member of `oneof`_ ``_network_attachment``. + """ + + class StackType(proto.Enum): + r"""Stack type for this network interface. + + Values: + STACK_TYPE_UNSPECIFIED (0): + Default should be STACK_TYPE_UNSPECIFIED. + IPV4_ONLY (1): + The network interface will be assigned IPv4 + address. + IPV4_IPV6 (2): + The network interface can have both IPv4 and + IPv6 addresses. + """ + STACK_TYPE_UNSPECIFIED = 0 + IPV4_ONLY = 1 + IPV4_IPV6 = 2 + + class Ipv6AccessType(proto.Enum): + r"""IPv6 access type for this network interface. + + Values: + UNSPECIFIED_IPV6_ACCESS_TYPE (0): + IPv6 access type not set. Means this network + interface hasn't been turned on IPv6 yet. + INTERNAL (1): + This network interface can have internal + IPv6. + EXTERNAL (2): + This network interface can have external + IPv6. + """ + UNSPECIFIED_IPV6_ACCESS_TYPE = 0 + INTERNAL = 1 + EXTERNAL = 2 + + class NicType(proto.Enum): + r"""Nic type for this network interface. + + Values: + NIC_TYPE_UNSPECIFIED (0): + Default should be NIC_TYPE_UNSPECIFIED. 
+ VIRTIO_NET (1): + VIRTIO + GVNIC (2): + GVNIC + """ + NIC_TYPE_UNSPECIFIED = 0 + VIRTIO_NET = 1 + GVNIC = 2 + + network: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + ipv6_address: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + internal_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AccessConfig", + ) + ipv6_access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="AccessConfig", + ) + alias_ip_ranges: MutableSequence["AliasIpRange"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AliasIpRange", + ) + stack_type: StackType = proto.Field( + proto.ENUM, + number=10, + optional=True, + enum=StackType, + ) + ipv6_access_type: Ipv6AccessType = proto.Field( + proto.ENUM, + number=11, + optional=True, + enum=Ipv6AccessType, + ) + queue_count: int = proto.Field( + proto.INT32, + number=12, + optional=True, + ) + nic_type: NicType = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=NicType, + ) + network_attachment: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + + +class NetworkPerformanceConfig(proto.Message): + r"""Network performance configuration. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_egress_bandwidth_tier (google.cloud.backupdr_v1.types.NetworkPerformanceConfig.Tier): + Optional. The tier of the total egress + bandwidth. + + This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. + """ + + class Tier(proto.Enum): + r"""Network performance tier. + + Values: + TIER_UNSPECIFIED (0): + This value is unused. + DEFAULT (1): + Default network performance config. + TIER_1 (2): + Tier 1 network performance config. + """ + TIER_UNSPECIFIED = 0 + DEFAULT = 1 + TIER_1 = 2 + + total_egress_bandwidth_tier: Tier = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Tier, + ) + + +class AccessConfig(proto.Message): + r"""An access configuration attached to an instance's network + interface. Only one access config per instance is supported. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.AccessConfig.AccessType): + Optional. In accessConfigs (IPv4), the default and only + option is ONE_TO_ONE_NAT. In ipv6AccessConfigs, the default + and only option is DIRECT_IPV6. + + This field is a member of `oneof`_ ``_type``. + name (str): + Optional. The name of this access + configuration. + + This field is a member of `oneof`_ ``_name``. + external_ip (str): + Optional. The external IP address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ip``. + external_ipv6 (str): + Optional. The external IPv6 address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ipv6``. + external_ipv6_prefix_length (int): + Optional. The prefix length of the external + IPv6 range. + + This field is a member of `oneof`_ ``_external_ipv6_prefix_length``. + set_public_ptr (bool): + Optional. 
Specifies whether a public DNS + 'PTR' record should be created to map the + external IP address of the instance to a DNS + domain name. + + This field is a member of `oneof`_ ``_set_public_ptr``. + public_ptr_domain_name (str): + Optional. The DNS domain name for the public + PTR record. + + This field is a member of `oneof`_ ``_public_ptr_domain_name``. + network_tier (google.cloud.backupdr_v1.types.AccessConfig.NetworkTier): + Optional. This signifies the networking tier + used for configuring this access + + This field is a member of `oneof`_ ``_network_tier``. + """ + + class AccessType(proto.Enum): + r"""The type of configuration. + + Values: + ACCESS_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + ONE_TO_ONE_NAT (1): + ONE_TO_ONE_NAT + DIRECT_IPV6 (2): + Direct IPv6 access. + """ + ACCESS_TYPE_UNSPECIFIED = 0 + ONE_TO_ONE_NAT = 1 + DIRECT_IPV6 = 2 + + class NetworkTier(proto.Enum): + r"""Network tier property used by addresses, instances and + forwarding rules. + + Values: + NETWORK_TIER_UNSPECIFIED (0): + Default value. This value is unused. + PREMIUM (1): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (2): + Public internet quality, only limited support + for other networking products. + """ + NETWORK_TIER_UNSPECIFIED = 0 + PREMIUM = 1 + STANDARD = 2 + + type_: AccessType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=AccessType, + ) + name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + external_ip: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + external_ipv6: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + external_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + set_public_ptr: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + public_ptr_domain_name: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + network_tier: NetworkTier = proto.Field( + proto.ENUM, + number=8, + optional=True, + enum=NetworkTier, + ) + + +class AliasIpRange(proto.Message): + r"""An alias IP range attached to an instance's network + interface. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + Optional. The IP alias ranges to allocate for + this interface. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + subnetwork_range_name (str): + Optional. The name of a subnetwork secondary + IP range from which to allocate an IP alias + range. If not specified, the primary range of + the subnetwork is used. + + This field is a member of `oneof`_ ``_subnetwork_range_name``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork_range_name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class InstanceParams(proto.Message): + r"""Additional instance params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Optional. Resource manager tags to be bound + to the instance. + """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class AllocationAffinity(proto.Message): + r"""Specifies the reservations that this instance can consume + from. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consume_allocation_type (google.cloud.backupdr_v1.types.AllocationAffinity.Type): + Optional. Specifies the type of reservation + from which this instance can consume + + This field is a member of `oneof`_ ``_consume_allocation_type``. + key (str): + Optional. Corresponds to the label key of a + reservation resource. + + This field is a member of `oneof`_ ``_key``. + values (MutableSequence[str]): + Optional. Corresponds to the label values of + a reservation resource. + """ + + class Type(proto.Enum): + r"""Indicates whether to consume from a reservation or not. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NO_RESERVATION (1): + Do not consume from any allocated capacity. + ANY_RESERVATION (2): + Consume any allocation available. + SPECIFIC_RESERVATION (3): + Must consume from a specific reservation. + Must specify key value fields for specifying the + reservations. + """ + TYPE_UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + consume_allocation_type: Type = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Type, + ) + key: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class Scheduling(proto.Message): + r"""Sets the scheduling options for an Instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + on_host_maintenance (google.cloud.backupdr_v1.types.Scheduling.OnHostMaintenance): + Optional. Defines the maintenance behavior + for this instance. + + This field is a member of `oneof`_ ``_on_host_maintenance``. + automatic_restart (bool): + Optional. Specifies whether the instance + should be automatically restarted if it is + terminated by Compute Engine (not terminated by + a user). + + This field is a member of `oneof`_ ``_automatic_restart``. + preemptible (bool): + Optional. Defines whether the instance is + preemptible. + + This field is a member of `oneof`_ ``_preemptible``. + node_affinities (MutableSequence[google.cloud.backupdr_v1.types.Scheduling.NodeAffinity]): + Optional. A set of node affinity and + anti-affinity configurations. Overrides + reservationAffinity. + min_node_cpus (int): + Optional. The minimum number of virtual CPUs + this instance will consume when running on a + sole-tenant node. + + This field is a member of `oneof`_ ``_min_node_cpus``. + provisioning_model (google.cloud.backupdr_v1.types.Scheduling.ProvisioningModel): + Optional. Specifies the provisioning model of + the instance. + + This field is a member of `oneof`_ ``_provisioning_model``. + instance_termination_action (google.cloud.backupdr_v1.types.Scheduling.InstanceTerminationAction): + Optional. Specifies the termination action + for the instance. + + This field is a member of `oneof`_ ``_instance_termination_action``. + local_ssd_recovery_timeout (google.cloud.backupdr_v1.types.SchedulingDuration): + Optional. Specifies the maximum amount of + time a Local Ssd Vm should wait while recovery + of the Local Ssd state is attempted. Its value + should be in between 0 and 168 hours with hour + granularity and the default value being 1 hour. + + This field is a member of `oneof`_ ``_local_ssd_recovery_timeout``. 
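+
+    Example: a minimal sketch of a spot-style scheduling configuration
+    (assumes the type is re-exported as ``backupdr_v1.Scheduling``; the
+    values are illustrative only)::
+
+        from google.cloud import backupdr_v1
+
+        # Spot-style settings: no automatic restart, SPOT provisioning,
+        # and STOP as the termination action.
+        scheduling = backupdr_v1.Scheduling(
+            automatic_restart=False,
+            provisioning_model=backupdr_v1.Scheduling.ProvisioningModel.SPOT,
+            instance_termination_action=(
+                backupdr_v1.Scheduling.InstanceTerminationAction.STOP
+            ),
+        )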
+    """
+
+    class OnHostMaintenance(proto.Enum):
+        r"""Defines the maintenance behavior for this instance.
+
+        Values:
+            ON_HOST_MAINTENANCE_UNSPECIFIED (0):
+                Default value. This value is unused.
+            TERMINATE (1):
+                Tells Compute Engine to terminate and
+                (optionally) restart the instance away from the
+                maintenance activity.
+            MIGRATE (1000):
+                Default. Allows Compute Engine to
+                automatically migrate instances out of the way
+                of maintenance events.
+        """
+        ON_HOST_MAINTENANCE_UNSPECIFIED = 0
+        TERMINATE = 1
+        MIGRATE = 1000
+
+    class ProvisioningModel(proto.Enum):
+        r"""Defines the provisioning model for an instance.
+
+        Values:
+            PROVISIONING_MODEL_UNSPECIFIED (0):
+                Default value. This value is not used.
+            STANDARD (1):
+                Standard provisioning with user controlled
+                runtime, no discounts.
+            SPOT (2):
+                Heavily discounted, no guaranteed runtime.
+        """
+        PROVISIONING_MODEL_UNSPECIFIED = 0
+        STANDARD = 1
+        SPOT = 2
+
+    class InstanceTerminationAction(proto.Enum):
+        r"""Defines the supported termination actions for an instance.
+
+        Values:
+            INSTANCE_TERMINATION_ACTION_UNSPECIFIED (0):
+                Default value. This value is unused.
+            DELETE (1):
+                Delete the VM.
+            STOP (2):
+                Stop the VM without storing in-memory
+                content. This is the default action.
+        """
+        INSTANCE_TERMINATION_ACTION_UNSPECIFIED = 0
+        DELETE = 1
+        STOP = 2
+
+    class NodeAffinity(proto.Message):
+        r"""Node Affinity: the configuration of desired nodes onto which
+        this Instance could be scheduled.
+
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            key (str):
+                Optional. Corresponds to the label key of
+                Node resource.
+
+                This field is a member of `oneof`_ ``_key``.
+            operator (google.cloud.backupdr_v1.types.Scheduling.NodeAffinity.Operator):
+                Optional. Defines the operation of node
+                selection.
+
+                This field is a member of `oneof`_ ``_operator``.
+            values (MutableSequence[str]):
+                Optional. Corresponds to the label values of
+                Node resource.
+        """
+
+        class Operator(proto.Enum):
+            r"""Defines the type of node selections.
+
+            Values:
+                OPERATOR_UNSPECIFIED (0):
+                    Default value. This value is unused.
+                IN (1):
+                    Requires Compute Engine to seek for matched
+                    nodes.
+                NOT_IN (2):
+                    Requires Compute Engine to avoid certain
+                    nodes.
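+
+            Example: a sketch of a node affinity that uses the ``IN``
+            operator (assumes the nested classes are reachable as
+            ``backupdr_v1.Scheduling.NodeAffinity``; the key and values are
+            placeholders)::
+
+                from google.cloud import backupdr_v1
+
+                affinity = backupdr_v1.Scheduling.NodeAffinity(
+                    key="node-group-label",
+                    operator=backupdr_v1.Scheduling.NodeAffinity.Operator.IN,
+                    values=["my-node-group"],
+                )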
+ """ + OPERATOR_UNSPECIFIED = 0 + IN = 1 + NOT_IN = 2 + + key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + operator: "Scheduling.NodeAffinity.Operator" = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum="Scheduling.NodeAffinity.Operator", + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + on_host_maintenance: OnHostMaintenance = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=OnHostMaintenance, + ) + automatic_restart: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + preemptible: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + node_affinities: MutableSequence[NodeAffinity] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=NodeAffinity, + ) + min_node_cpus: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + provisioning_model: ProvisioningModel = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=ProvisioningModel, + ) + instance_termination_action: InstanceTerminationAction = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=InstanceTerminationAction, + ) + local_ssd_recovery_timeout: "SchedulingDuration" = proto.Field( + proto.MESSAGE, + number=10, + optional=True, + message="SchedulingDuration", + ) + + +class SchedulingDuration(proto.Message): + r"""A SchedulingDuration represents a fixed-length span of time + represented as a count of seconds and fractions of seconds at + nanosecond resolution. It is independent of any calendar and + concepts like "day" or "month". Range is approximately 10,000 + years. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + seconds (int): + Optional. Span of time at a resolution of a + second. + + This field is a member of `oneof`_ ``_seconds``. + nanos (int): + Optional. Span of time that's a fraction of a + second at nanosecond resolution. + + This field is a member of `oneof`_ ``_nanos``. + """ + + seconds: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + nanos: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class ServiceAccount(proto.Message): + r"""A service account. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + email (str): + Optional. Email address of the service + account. + + This field is a member of `oneof`_ ``_email``. + scopes (MutableSequence[str]): + Optional. The list of scopes to be made + available for this service account. + """ + + email: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class Tags(proto.Message): + r"""A set of instance tags. + + Attributes: + items (MutableSequence[str]): + Optional. An array of tags. Each tag must be + 1-63 characters long, and comply with RFC1035. + """ + + items: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class AttachedDisk(proto.Message): + r"""An instance-attached disk resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + initialize_params (google.cloud.backupdr_v1.types.AttachedDisk.InitializeParams): + Optional. Specifies the parameters to + initialize this disk. + + This field is a member of `oneof`_ ``_initialize_params``. + device_name (str): + Optional. 
This is used as an identifier for the disks. This
+            is the unique name that has to be provided to modify disk
+            parameters like disk_name and replica_zones (in case of RePDs).
+
+            This field is a member of `oneof`_ ``_device_name``.
+        kind (str):
+            Optional. Type of the resource.
+
+            This field is a member of `oneof`_ ``_kind``.
+        disk_type_deprecated (google.cloud.backupdr_v1.types.AttachedDisk.DiskType):
+            Specifies the type of the disk.
+
+            This field is a member of `oneof`_ ``_disk_type_deprecated``.
+        mode (google.cloud.backupdr_v1.types.AttachedDisk.DiskMode):
+            Optional. The mode in which to attach this
+            disk.
+
+            This field is a member of `oneof`_ ``_mode``.
+        source (str):
+            Optional. Specifies a valid partial or full
+            URL to an existing Persistent Disk resource.
+
+            This field is a member of `oneof`_ ``_source``.
+        index (int):
+            Optional. A zero-based index to this disk,
+            where 0 is reserved for the boot disk.
+
+            This field is a member of `oneof`_ ``_index``.
+        boot (bool):
+            Optional. Indicates that this is a boot disk.
+            The virtual machine will use the first partition
+            of the disk for its root filesystem.
+
+            This field is a member of `oneof`_ ``_boot``.
+        auto_delete (bool):
+            Optional. Specifies whether the disk will be
+            auto-deleted when the instance is deleted (but
+            not when the disk is detached from the
+            instance).
+
+            This field is a member of `oneof`_ ``_auto_delete``.
+        license_ (MutableSequence[str]):
+            Optional. Any valid publicly visible
+            licenses.
+        disk_interface (google.cloud.backupdr_v1.types.AttachedDisk.DiskInterface):
+            Optional. Specifies the disk interface to use
+            for attaching this disk.
+
+            This field is a member of `oneof`_ ``_disk_interface``.
+        guest_os_feature (MutableSequence[google.cloud.backupdr_v1.types.GuestOsFeature]):
+            Optional. A list of features to enable on the
+            guest operating system. Applicable only for
+            bootable images.
+        disk_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey):
+            Optional. Encrypts or decrypts a disk using a
+            customer-supplied encryption key.
+
+            This field is a member of `oneof`_ ``_disk_encryption_key``.
+        disk_size_gb (int):
+            Optional. The size of the disk in GB.
+
+            This field is a member of `oneof`_ ``_disk_size_gb``.
+        saved_state (google.cloud.backupdr_v1.types.AttachedDisk.DiskSavedState):
+            Optional. Output only. The state of the disk.
+
+            This field is a member of `oneof`_ ``_saved_state``.
+        disk_type (str):
+            Optional. Output only. The URI of the disk
+            type resource. For example:
+            projects/project/zones/zone/diskTypes/pd-standard
+            or pd-ssd.
+
+            This field is a member of `oneof`_ ``_disk_type``.
+        type_ (google.cloud.backupdr_v1.types.AttachedDisk.DiskType):
+            Optional. Specifies the type of the disk.
+
+            This field is a member of `oneof`_ ``_type``.
+    """
+
+    class DiskType(proto.Enum):
+        r"""List of the Disk Types.
+
+        Values:
+            DISK_TYPE_UNSPECIFIED (0):
+                Default value, which is unused.
+            SCRATCH (1):
+                A scratch disk type.
+            PERSISTENT (2):
+                A persistent disk type.
+        """
+        DISK_TYPE_UNSPECIFIED = 0
+        SCRATCH = 1
+        PERSISTENT = 2
+
+    class DiskMode(proto.Enum):
+        r"""List of the Disk Modes.
+
+        Values:
+            DISK_MODE_UNSPECIFIED (0):
+                Default value, which is unused.
+            READ_WRITE (1):
+                Attaches this disk in read-write mode. Only
+                one virtual machine at a time can be attached to
+                a disk in read-write mode.
+            READ_ONLY (2):
+                Attaches this disk in read-only mode.
+                Multiple virtual machines can use a disk in
+                read-only mode at a time.
+ LOCKED (3): + The disk is locked for administrative + reasons. Nobody else can use the disk. This mode + is used (for example) when taking a snapshot of + a disk to prevent mounting the disk while it is + being snapshotted. + """ + DISK_MODE_UNSPECIFIED = 0 + READ_WRITE = 1 + READ_ONLY = 2 + LOCKED = 3 + + class DiskInterface(proto.Enum): + r"""List of the Disk Interfaces. + + Values: + DISK_INTERFACE_UNSPECIFIED (0): + Default value, which is unused. + SCSI (1): + SCSI Disk Interface. + NVME (2): + NVME Disk Interface. + NVDIMM (3): + NVDIMM Disk Interface. + ISCSI (4): + ISCSI Disk Interface. + """ + DISK_INTERFACE_UNSPECIFIED = 0 + SCSI = 1 + NVME = 2 + NVDIMM = 3 + ISCSI = 4 + + class DiskSavedState(proto.Enum): + r"""List of the states of the Disk. + + Values: + DISK_SAVED_STATE_UNSPECIFIED (0): + Default Disk state has not been preserved. + PRESERVED (1): + Disk state has been preserved. + """ + DISK_SAVED_STATE_UNSPECIFIED = 0 + PRESERVED = 1 + + class InitializeParams(proto.Message): + r"""Specifies the parameters to initialize this disk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_name (str): + Optional. Specifies the disk name. If not + specified, the default is to use the name of the + instance. + + This field is a member of `oneof`_ ``_disk_name``. + replica_zones (MutableSequence[str]): + Optional. URL of the zone where the disk + should be created. Required for each regional + disk associated with the instance. + """ + + disk_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + initialize_params: InitializeParams = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message=InitializeParams, + ) + device_name: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + disk_type_deprecated: DiskType = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=DiskType, + ) + mode: DiskMode = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=DiskMode, + ) + source: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + index: int = proto.Field( + proto.INT64, + number=9, + optional=True, + ) + boot: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + auto_delete: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + license_: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + disk_interface: DiskInterface = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=DiskInterface, + ) + guest_os_feature: MutableSequence["GuestOsFeature"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="GuestOsFeature", + ) + disk_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="CustomerEncryptionKey", + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=16, + optional=True, + ) + saved_state: DiskSavedState = proto.Field( + proto.ENUM, + number=17, + optional=True, + enum=DiskSavedState, + ) + disk_type: str = proto.Field( + proto.STRING, + number=18, + optional=True, + ) + type_: DiskType = proto.Field( + proto.ENUM, + number=19, + optional=True, + enum=DiskType, + ) + + +class GuestOsFeature(proto.Message): + r"""Feature type of the Guest OS. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.GuestOsFeature.FeatureType): + The ID of a supported feature. + + This field is a member of `oneof`_ ``_type``. + """ + + class FeatureType(proto.Enum): + r"""List of the Feature Types. + + Values: + FEATURE_TYPE_UNSPECIFIED (0): + Default value, which is unused. + VIRTIO_SCSI_MULTIQUEUE (1): + VIRTIO_SCSI_MULTIQUEUE feature type. + WINDOWS (2): + WINDOWS feature type. + MULTI_IP_SUBNET (3): + MULTI_IP_SUBNET feature type. + UEFI_COMPATIBLE (4): + UEFI_COMPATIBLE feature type. + SECURE_BOOT (5): + SECURE_BOOT feature type. + GVNIC (6): + GVNIC feature type. + SEV_CAPABLE (7): + SEV_CAPABLE feature type. + BARE_METAL_LINUX_COMPATIBLE (8): + BARE_METAL_LINUX_COMPATIBLE feature type. + SUSPEND_RESUME_COMPATIBLE (9): + SUSPEND_RESUME_COMPATIBLE feature type. + SEV_LIVE_MIGRATABLE (10): + SEV_LIVE_MIGRATABLE feature type. + SEV_SNP_CAPABLE (11): + SEV_SNP_CAPABLE feature type. + TDX_CAPABLE (12): + TDX_CAPABLE feature type. + IDPF (13): + IDPF feature type. + SEV_LIVE_MIGRATABLE_V2 (14): + SEV_LIVE_MIGRATABLE_V2 feature type. + """ + FEATURE_TYPE_UNSPECIFIED = 0 + VIRTIO_SCSI_MULTIQUEUE = 1 + WINDOWS = 2 + MULTI_IP_SUBNET = 3 + UEFI_COMPATIBLE = 4 + SECURE_BOOT = 5 + GVNIC = 6 + SEV_CAPABLE = 7 + BARE_METAL_LINUX_COMPATIBLE = 8 + SUSPEND_RESUME_COMPATIBLE = 9 + SEV_LIVE_MIGRATABLE = 10 + SEV_SNP_CAPABLE = 11 + TDX_CAPABLE = 12 + IDPF = 13 + SEV_LIVE_MIGRATABLE_V2 = 14 + + type_: FeatureType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=FeatureType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py new file mode 100644 index 000000000000..25dbf9cca081 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py new file mode 100644 index 000000000000..fc82ca77f706 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py new file mode 100644 index 000000000000..ff546daa2ac6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py new file mode 100644 index 000000000000..5c648a085be1 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py new file mode 100644 index 000000000000..1acf666c5d38 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py new file mode 100644 index 000000000000..5a1abda3a275 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py new file mode 100644 index 000000000000..346d24a9543e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py new file mode 100644 index 000000000000..21af239763d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import backupdr_v1
+
+
+async def sample_delete_backup_plan_association():
+    # Create a client
+    client = backupdr_v1.BackupDRAsyncClient()
+
+    # Initialize request argument(s)
+    request = backupdr_v1.DeleteBackupPlanAssociationRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_backup_plan_association(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async]
diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py
new file mode 100644
index 000000000000..18c4ca0cae3b
--- /dev/null
+++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteBackupPlanAssociation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-backupdr
+
+
+# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py new file mode 100644 index 000000000000..3423852e66cd --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import backupdr_v1
+
+
+async def sample_delete_backup_plan():
+    # Create a client
+    client = backupdr_v1.BackupDRAsyncClient()
+
+    # Initialize request argument(s)
+    request = backupdr_v1.DeleteBackupPlanRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_backup_plan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_async]
diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py
new file mode 100644
index 000000000000..aaabe6e55265
--- /dev/null
+++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteBackupPlan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-backupdr
+
+
+# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py new file mode 100644 index 000000000000..dee368f1cd32 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py new file mode 100644 index 000000000000..a70379011f44 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import backupdr_v1
+
+
+async def sample_delete_backup_vault():
+    # Create a client
+    client = backupdr_v1.BackupDRAsyncClient()
+
+    # Initialize request argument(s)
+    request = backupdr_v1.DeleteBackupVaultRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_backup_vault(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_async]
diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py
new file mode 100644
index 000000000000..2b824ef4088e
--- /dev/null
+++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteBackupVault
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-backupdr
+
+
+# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py new file mode 100644 index 000000000000..ab7dc9c365b4 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchUsableBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import backupdr_v1
+
+
+async def sample_fetch_usable_backup_vaults():
+    # Create a client
+    client = backupdr_v1.BackupDRAsyncClient()
+
+    # Initialize request argument(s)
+    request = backupdr_v1.FetchUsableBackupVaultsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.fetch_usable_backup_vaults(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async]
diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py
new file mode 100644
index 000000000000..0e4abb2342d5
--- /dev/null
+++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for FetchUsableBackupVaults
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-backupdr
+
+
+# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py new file mode 100644 index 000000000000..27f69f503b1b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py new file mode 100644 index 000000000000..666e503e039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py new file mode 100644 index 000000000000..f16d4b5dcdc6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py new file mode 100644 index 000000000000..bbca5985c4d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py new file mode 100644 index 000000000000..3e6f35ccdc90 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py new file mode 100644 index 000000000000..064cbac8920e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py new file mode 100644 index 000000000000..95d30ed5bf46 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py new file mode 100644 index 000000000000..814ccccaf4a0 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py new file mode 100644 index 000000000000..864ee90db114 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py new file mode 100644 index 000000000000..95f18218de42 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py new file mode 100644 index 000000000000..e6cfd3cc039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import backupdr_v1
+
+
+async def sample_list_backup_plan_associations():
+    # Create a client
+    client = backupdr_v1.BackupDRAsyncClient()
+
+    # Initialize request argument(s)
+    request = backupdr_v1.ListBackupPlanAssociationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_backup_plan_associations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async]
diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py
new file mode 100644
index 000000000000..39b135ce9944
--- /dev/null
+++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackupPlanAssociations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-backupdr
+
+
+# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py new file mode 100644 index 000000000000..f09593b5796b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlans +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlans_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import backupdr_v1
+
+
+async def sample_list_backup_plans():
+    # Create a client
+    client = backupdr_v1.BackupDRAsyncClient()
+
+    # Initialize request argument(s)
+    request = backupdr_v1.ListBackupPlansRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_backup_plans(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END backupdr_v1_generated_BackupDR_ListBackupPlans_async]
diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py
new file mode 100644
index 000000000000..ab2cab9b1701
--- /dev/null
+++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackupPlans
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-backupdr
+
+
+# [START backupdr_v1_generated_BackupDR_ListBackupPlans_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlans_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py new file mode 100644 index 000000000000..675c345b810c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import backupdr_v1
+
+
+async def sample_list_backup_vaults():
+    # Create a client
+    client = backupdr_v1.BackupDRAsyncClient()
+
+    # Initialize request argument(s)
+    request = backupdr_v1.ListBackupVaultsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_backup_vaults(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END backupdr_v1_generated_BackupDR_ListBackupVaults_async]
diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py
new file mode 100644
index 000000000000..27b1faa5debb
--- /dev/null
+++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackupVaults
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-backupdr
+
+
+# [START backupdr_v1_generated_BackupDR_ListBackupVaults_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py new file mode 100644 index 000000000000..da6366f3b095 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackups_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py new file mode 100644 index 000000000000..18387f7371a5 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackups_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py new file mode 100644 index 000000000000..f5cb4d5a4477 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListDataSources_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py new file mode 100644 index 000000000000..36680bf32e15 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListDataSources_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py new file mode 100644 index 000000000000..9bdfab3c21bc --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_RestoreBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_RestoreBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py new file mode 100644 index 000000000000..6b503fb4a546 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_RestoreBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_RestoreBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py new file mode 100644 index 000000000000..6c6c641d54ee --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py new file mode 100644 index 000000000000..359727f2dd1c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py new file mode 100644 index 000000000000..a1a2fcc0ce51 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py new file mode 100644 index 000000000000..9ea7e26404d0 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py new file mode 100644 index 000000000000..386f2ca872d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py new file mode 100644 index 000000000000..ab3690e1df33 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py new file mode 100644 index 000000000000..986de214c53d --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py new file mode 100644 index 000000000000..d20aa5d93848 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index ff879435143f..904b6f7dbef2 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -11,6 +11,537 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + "service": { + "fullName": 
"google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan" + }, + "description": "Sample for CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_async", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_plan" + }, + "description": "Sample for CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_sync", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_vault" + }, + "description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": 
"BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_vault" + }, + "description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py" + }, { "canonical": true, "clientMethod": { @@ -21,28 +552,3260 @@ }, "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_management_server", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupVault_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": 
"DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupVault_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", 
+ "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": 
"backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for FetchUsableBackupVaults", + "file": 
"backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + }, + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + }, + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + 
"shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + 
}, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": 
"backupdr_v1_generated_backup_dr_list_backup_vaults_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": 
"FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "RestoreBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + "shortName": "RestoreBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" + "name": "retry", + "type": "google.api_core.retry.Retry" }, { - "name": "management_server_id", + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.trigger_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "TriggerBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "rule_id", "type": "str" }, { @@ -59,13 +3822,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_async", "segments": [ { "end": 56, @@ -98,7 +3861,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_async.py" }, { "canonical": true, @@ -107,30 +3870,26 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.trigger_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": 
"google.cloud.backupdr.v1.BackupDR.TriggerBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + "shortName": "TriggerBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" - }, - { - "name": "management_server_id", + "name": "rule_id", "type": "str" }, { @@ -147,13 +3906,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_sync", "segments": [ { "end": 56, @@ -186,7 +3945,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py" }, { "canonical": true, @@ -196,23 +3955,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -228,21 +3991,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -252,22 +4015,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": 
"RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py" }, { "canonical": true, @@ -276,23 +4039,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -308,21 +4075,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -332,22 +4099,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py" }, { "canonical": true, @@ -357,23 +4124,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -388,22 +4159,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - 
"shortName": "get_management_server" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -413,22 +4184,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_async.py" }, { "canonical": true, @@ -437,23 +4208,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -468,22 +4243,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -493,22 +4268,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_sync.py" }, { "canonical": true, @@ -518,23 +4293,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": 
"BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -549,22 +4328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", - "shortName": "list_management_servers" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -574,22 +4353,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_async.py" }, { "canonical": true, @@ -598,23 +4377,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -629,22 +4412,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", - "shortName": "list_management_servers" + "resultType": "google.api_core.operation.Operation", + 
"shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -654,22 +4437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_sync.py" } ] } diff --git a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py index b65698148046..c0dd15568f46 100644 --- a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py +++ b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py @@ -39,10 +39,33 @@ def partition( class backupdrCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_backup_plan': ('parent', 'backup_plan_id', 'backup_plan', 'request_id', ), + 'create_backup_plan_association': ('parent', 'backup_plan_association_id', 'backup_plan_association', 'request_id', ), + 'create_backup_vault': ('parent', 'backup_vault_id', 'backup_vault', 'request_id', 'validate_only', ), 'create_management_server': ('parent', 'management_server_id', 'management_server', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), + 'delete_backup_plan': ('name', 'request_id', ), + 'delete_backup_plan_association': ('name', 'request_id', ), + 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', ), 'delete_management_server': ('name', 'request_id', ), + 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'get_backup': ('name', 'view', ), + 'get_backup_plan': ('name', ), + 'get_backup_plan_association': ('name', ), + 'get_backup_vault': ('name', 'view', ), + 'get_data_source': ('name', ), 'get_management_server': ('name', ), + 'list_backup_plan_associations': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_backup_plans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_data_sources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_management_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'compute_instance_restore_properties', ), + 'trigger_backup': ('name', 'rule_id', 'request_id', ), + 'update_backup': ('update_mask', 'backup', 'request_id', ), + 'update_backup_vault': ('update_mask', 'backup_vault', 'request_id', 'validate_only', 'force', ), + 
'update_data_source': ('update_mask', 'data_source', 'request_id', 'allow_missing', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 1b1c7ad74e91..ee098d5a5646 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -48,10 +48,16 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -66,7 +72,14 @@ pagers, transports, ) -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) def client_cert_source_callback(): @@ -2911,52 +2924,98 @@ async def test_delete_management_server_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupdr.ListManagementServersRequest, + backupvault.CreateBackupVaultRequest, dict, ], ) -def test_list_management_servers_rest(request_type): +def test_create_backup_vault(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_management_servers(request) +def test_create_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListManagementServersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) -def test_list_management_servers_rest_use_cached_wrapped_rpc(): +def test_create_backup_vault_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2965,8 +3024,7 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_management_servers - in client._transport._wrapped_methods + client._transport.create_backup_vault in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -2975,128 +3033,18962 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_management_servers + client._transport.create_backup_vault ] = mock_rpc - request = {} - client.list_management_servers(request) + client.create_backup_vault(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_management_servers(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_management_servers_rest_required_fields( - request_type=backupdr.ListManagementServersRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_create_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.create_backup_vault + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_vault + ] = mock_rpc - client = BackupDRClient( + request = {} + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault(request) - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - response = client.list_management_servers(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_backup_vault_async_from_dict(): + await test_create_backup_vault_async(request_type=dict) -def test_list_management_servers_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_create_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_management_servers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.CreateBackupVaultRequest() + request.parent = "parent_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_management_servers_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.CreateBackupVaultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +def test_create_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +def test_list_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupVaultsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_from_dict(): + await test_list_backup_vaults_async(request_type=dict) + + +def test_list_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.ListBackupVaultsResponse() + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_vaults(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + +def test_list_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_fetch_usable_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +def test_fetch_usable_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_fetch_usable_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_usable_backup_vaults + ] = mock_rpc + request = {} + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_usable_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_usable_backup_vaults + ] = mock_rpc + + request = {} + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async( + transport: str = "grpc_asyncio", + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchUsableBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_from_dict(): + await test_fetch_usable_backup_vaults_async(request_type=dict) + + +def test_fetch_usable_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_fetch_usable_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_fetch_usable_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_fetch_usable_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.fetch_usable_backup_vaults( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + +def test_fetch_usable_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_usable_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_usable_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_usable_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + response = client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +def test_get_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +def test_get_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupVaultRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest( + name="name_value", + ) + + +def test_get_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_vault in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc + request = {} + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_vault + ] = mock_rpc + + request = {} + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_from_dict(): + await test_get_backup_vault_async(request_type=dict) + + +def test_get_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = backupvault.BackupVault() + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupVaultRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backup_vault + ] = mock_rpc + request = {} + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup_vault + ] = mock_rpc + + request = {} + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_from_dict(): + await test_update_backup_vault_async(request_type=dict) + + +def test_update_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +def test_delete_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_from_dict(): + await test_delete_backup_vault_async(request_type=dict) + + +def test_delete_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +def test_list_data_sources_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_data_sources(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_data_sources_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_list_data_sources_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_sources + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_sources + ] = mock_rpc + + request = {} + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_data_sources_async_from_dict(): + await test_list_data_sources_async(request_type=dict) + + +def test_list_data_sources_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = backupvault.ListDataSourcesResponse() + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_sources_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_sources_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_sources_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_sources(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + +def test_list_data_sources_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_sources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_sources( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_sources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + response = client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +def test_get_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetDataSourceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest( + name="name_value", + ) + + +def test_get_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +@pytest.mark.asyncio +async def test_get_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_source + ] = mock_rpc + + request = {} + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_data_source_async_from_dict(): + await test_get_data_source_async(request_type=dict) + + +def test_get_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = backupvault.DataSource() + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateDataSourceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +@pytest.mark.asyncio +async def test_update_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_source + ] = mock_rpc + + request = {} + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_source_async_from_dict(): + await test_update_data_source_async(request_type=dict) + + +def test_update_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +def test_update_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backups + ] = mock_rpc + + request = {} + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = backupvault.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backups_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + +def test_list_backups_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup + ] = mock_rpc + + request = {} + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backupvault.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup + ] = mock_rpc + + request = {} + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest( + name="name_value", + ) + + +def test_delete_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup + ] = mock_rpc + + request = {} + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.RestoreBackupRequest, + dict, + ], +) +def test_restore_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +def test_restore_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.RestoreBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.restore_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest( + name="name_value", + ) + + +def test_restore_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + request = {} + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +@pytest.mark.asyncio +async def test_restore_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.restore_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.restore_backup + ] = mock_rpc + + request = {} + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.RestoreBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_backup_async_from_dict(): + await test_restore_backup_async(request_type=dict) + + +def test_restore_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_restore_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_restore_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.CreateBackupPlanRequest, + dict, + ], +) +def test_create_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +def test_create_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + +def test_create_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc + request = {} + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan + ] = mock_rpc + + request = {} + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.CreateBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_from_dict(): + await test_create_backup_plan_async(request_type=dict) + + +def test_create_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +def test_create_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.GetBackupPlanRequest, + dict, + ], +) +def test_get_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + response = client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +def test_get_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.GetBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest( + name="name_value", + ) + + +def test_get_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan + ] = mock_rpc + + request = {} + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_from_dict(): + await test_get_backup_plan_async(request_type=dict) + + +def test_get_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = backupplan.BackupPlan() + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plans_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +def test_list_backup_plans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_plans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + request = {} + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plans + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_plans + ] = mock_rpc + + request = {} + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_async( + transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlansAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_from_dict(): + await test_list_backup_plans_async(request_type=dict) + + +def test_list_backup_plans_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = backupplan.ListBackupPlansResponse() + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plans_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.ListBackupPlansResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plans_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.ListBackupPlansResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + +def test_list_backup_plans_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plans(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plans( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plans(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.DeleteBackupPlanRequest, + dict, + ], +) +def test_delete_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + +def test_delete_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc + request = {} + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan + ] = mock_rpc + + request = {} + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_from_dict(): + await test_delete_backup_plan_async(request_type=dict) + + +def test_delete_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_from_dict(): + await test_create_backup_plan_association_async(request_type=dict) + + +def test_create_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +def test_create_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + response = client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_get_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_from_dict(): + await test_get_backup_plan_association_async(request_type=dict) + + +def test_get_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = backupplanassociation.BackupPlanAssociation() + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.ListBackupPlanAssociationsRequest, + dict, + ], +) +def test_list_backup_plan_associations(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plan_associations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plan_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_backup_plan_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_plan_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plan_associations + ] = mock_rpc + request = {} + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plan_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_plan_associations + ] = mock_rpc + + request = {} + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_from_dict(): + await test_list_backup_plan_associations_async(request_type=dict) + + +def test_list_backup_plan_associations_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plan_associations_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plan_associations_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plan_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) + + +def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plan_associations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plan_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plan_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_delete_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan_association + ] = mock_rpc + request = {} + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan_association + ] = mock_rpc + + request = {} + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_from_dict(): + await test_delete_backup_plan_association_async(request_type=dict) + + +def test_delete_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.TriggerBackupRequest, + dict, + ], +) +def test_trigger_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_trigger_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +def test_trigger_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.trigger_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + +def test_trigger_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.trigger_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + request = {} + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +@pytest.mark.asyncio +async def test_trigger_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.trigger_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.trigger_backup + ] = mock_rpc + + request = {} + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.TriggerBackupRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_trigger_backup_async_from_dict(): + await test_trigger_backup_async(request_type=dict) + + +def test_trigger_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_trigger_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_trigger_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +def test_trigger_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.ListManagementServersRequest, + dict, + ], +) +def test_list_management_servers_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_management_servers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListManagementServersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_management_servers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_management_servers + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_management_servers + ] = mock_rpc + + request = {} + client.list_management_servers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_management_servers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_management_servers_rest_required_fields( + request_type=backupdr.ListManagementServersRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_management_servers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_management_servers_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_management_servers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_management_servers_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_management_servers" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_management_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.ListManagementServersRequest.pb( + backupdr.ListManagementServersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ListManagementServersResponse.to_json( + backupdr.ListManagementServersResponse() + ) + + request = backupdr.ListManagementServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ListManagementServersResponse() + + client.list_management_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_management_servers_rest_bad_request( + transport: str = "rest", request_type=backupdr.ListManagementServersRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_management_servers(request) + + +def test_list_management_servers_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_management_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_list_management_servers_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_management_servers( + backupdr.ListManagementServersRequest(), + parent="parent_value", + ) + + +def test_list_management_servers_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + next_page_token="abc", + ), + backupdr.ListManagementServersResponse( + management_servers=[], + next_page_token="def", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + ], + next_page_token="ghi", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupdr.ListManagementServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_management_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupdr.ManagementServer) for i in results) + + pages = list(client.list_management_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.GetManagementServerRequest, + dict, + ], +) +def test_get_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer( + name="name_value", + description="description_value", + type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, + state=backupdr.ManagementServer.InstanceState.CREATING, + etag="etag_value", + oauth2_client_id="oauth2_client_id_value", + ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupdr.ManagementServer) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE + assert response.state == backupdr.ManagementServer.InstanceState.CREATING + assert response.etag == "etag_value" + assert response.oauth2_client_id == "oauth2_client_id_value" + assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True + + +def test_get_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_management_server + ] = mock_rpc + + request = {} + client.get_management_server(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_management_server_rest_required_fields( + request_type=backupdr.GetManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.GetManagementServerRequest.pb( + backupdr.GetManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ManagementServer.to_json( + backupdr.ManagementServer() + ) + + request = backupdr.GetManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ManagementServer() + + client.get_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.GetManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_management_server(request) + + +def test_get_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_get_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_management_server( + backupdr.GetManagementServerRequest(), + name="name_value", + ) + + +def test_get_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.CreateManagementServerRequest, + dict, + ], +) +def test_create_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["management_server"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "type_": 1, + "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, + "workforce_identity_based_management_uri": { + "first_party_management_uri": "first_party_management_uri_value", + "third_party_management_uri": "third_party_management_uri_value", + }, + "state": 1, + "networks": [{"network": "network_value", "peering_mode": 1}], + "etag": "etag_value", + "oauth2_client_id": "oauth2_client_id_value", + "workforce_identity_based_oauth2_client_id": { + "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", + "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", + }, + "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], + "satisfies_pzs": {"value": True}, + "satisfies_pzi": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
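+ # A message type without a DESCRIPTOR attribute is treated as a
+ # proto-plus type and its fields are read from .meta.fields; plain
+ # protobuf types are read from DESCRIPTOR.fields instead.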
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["management_server"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["management_server"][field])): + del request_init["management_server"][field][i][subfield] + else: + del request_init["management_server"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_management_server + ] = mock_rpc + + request = {} + client.create_management_server(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_management_server_rest_required_fields( + request_type=backupdr.CreateManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["management_server_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "managementServerId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "managementServerId" in jsonified_request + assert ( + jsonified_request["managementServerId"] == request_init["management_server_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["managementServerId"] = "management_server_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "management_server_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "managementServerId" in jsonified_request + assert jsonified_request["managementServerId"] == "management_server_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
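+ # The expected query params below include "managementServerId" with its
+ # empty default value in addition to the "$alt" encoding parameter.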
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_management_server(request) + + expected_params = [ + ( + "managementServerId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_management_server._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "managementServerId", + "requestId", + ) + ) + & set( + ( + "parent", + "managementServerId", + "managementServer", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.CreateManagementServerRequest.pb( + backupdr.CreateManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.CreateManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_management_server(request) + + +def test_create_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_create_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_management_server( + backupdr.CreateManagementServerRequest(), + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + + +def test_create_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.DeleteManagementServerRequest, + dict, + ], +) +def test_delete_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_management_server(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_management_server + ] = mock_rpc + + request = {} + client.delete_management_server(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_management_server_rest_required_fields( + request_type=backupdr.DeleteManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.DeleteManagementServerRequest.pb( + backupdr.DeleteManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.DeleteManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
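+ # A mocked 400 status should surface to the caller as
+ # core_exceptions.BadRequest.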
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_management_server(request) + + +def test_delete_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_management_server( + backupdr.DeleteManagementServerRequest(), + name="name_value", + ) + + +def test_delete_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_vault"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.CreateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_vault + ] = mock_rpc + + request = {} + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_vault_rest_required_fields( + request_type=backupvault.CreateBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_vault_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupVaultId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupVaultId"] = "backup_vault_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_vault_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == "backup_vault_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_vault(request) + + expected_params = [ + ( + "backupVaultId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupVaultId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "backupVaultId", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.CreateBackupVaultRequest.pb( + backupvault.CreateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.CreateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_vault(request) + + +def test_create_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +def test_create_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backup_vaults(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_vaults_rest_required_fields( + request_type=backupvault.ListBackupVaultsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.ListBackupVaultsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backup_vaults(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_vaults_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListBackupVaultsRequest.pb( + backupvault.ListBackupVaultsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListBackupVaultsResponse.to_json( + backupvault.ListBackupVaultsResponse() + ) + + request = backupvault.ListBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupVaultsResponse() + + client.list_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_vaults_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListBackupVaultsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + 
request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backup_vaults(request) + + +def test_list_backup_vaults_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backup_vaults(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.list_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_usable_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_usable_backup_vaults + ] = mock_rpc + + request = {} + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_usable_backup_vaults_rest_required_fields( + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_usable_backup_vaults(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_usable_backup_vaults_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_usable_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_fetch_usable_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.FetchUsableBackupVaultsRequest.pb( + backupvault.FetchUsableBackupVaultsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.FetchUsableBackupVaultsResponse.to_json( + backupvault.FetchUsableBackupVaultsResponse() + ) + + request = backupvault.FetchUsableBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.FetchUsableBackupVaultsResponse() + + client.fetch_usable_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_usable_backup_vaults_rest_bad_request( + transport: str = "rest", request_type=backupvault.FetchUsableBackupVaultsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_usable_backup_vaults(request) + + +def test_fetch_usable_backup_vaults_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_usable_backup_vaults(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable" + % client.transport._host, + args[1], + ) + + +def test_fetch_usable_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_fetch_usable_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.FetchUsableBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.fetch_usable_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.fetch_usable_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +def test_get_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_vault in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc + + request = {} + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_vault_rest_required_fields( + request_type=backupvault.GetBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetBackupVaultRequest.pb( + backupvault.GetBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.BackupVault.to_json( + backupvault.BackupVault() + ) + + request = backupvault.GetBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.BackupVault() + + client.get_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup_vault(request) + + +def test_get_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +def test_get_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request_init["backup_vault"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backup_vault + ] = mock_rpc + + request = {} + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_vault_rest_required_fields( + request_type=backupvault.UpdateBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupVaultRequest.pb( + backupvault.UpdateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup_vault(request) + + +def test_update_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup_vault(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_vault_rest_required_fields( + request_type=backupvault.DeleteBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.DeleteBackupVaultRequest.pb( + backupvault.DeleteBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.DeleteBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_vault(request) + + +def test_delete_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +def test_delete_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_sources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_sources_rest_required_fields( + request_type=backupvault.ListDataSourcesRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_sources_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_sources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_sources_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_data_sources" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_data_sources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListDataSourcesRequest.pb( + backupvault.ListDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListDataSourcesResponse.to_json( + backupvault.ListDataSourcesResponse() + ) + + request = backupvault.ListDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListDataSourcesResponse() + + client.list_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_sources_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_sources(request) + + +def test_list_data_sources_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_source(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_source_rest_required_fields( + request_type=backupvault.GetDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetDataSourceRequest.pb( + backupvault.GetDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.DataSource.to_json( + backupvault.DataSource() + ) + + request = backupvault.GetDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.DataSource() + + client.get_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_source(request) + + +def test_get_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +def test_get_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request_init["data_source"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4", + "state": 1, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_count": 1278, + "etag": "etag_value", + "total_stored_bytes": 1946, + "config_state": 1, + "backup_config_info": { + "last_backup_state": 1, + "last_successful_backup_consistency_time": {}, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "gcp_backup_config": { + "backup_plan": "backup_plan_value", + "backup_plan_description": "backup_plan_description_value", + "backup_plan_association": "backup_plan_association_value", + "backup_plan_rules": [ + "backup_plan_rules_value1", + "backup_plan_rules_value2", + ], + }, + "backup_appliance_backup_config": { + "backup_appliance_name": "backup_appliance_name_value", + "backup_appliance_id": 1966, + "sla_id": 620, + "application_name": "application_name_value", + "host_name": "host_name_value", + "slt_name": "slt_name_value", + "slp_name": "slp_name_value", + }, + }, + "data_source_gcp_resource": { + "gcp_resourcename": "gcp_resourcename_value", + "location": "location_value", + "type_": "type__value", + "compute_instance_datasource_properties": { + "name": "name_value", + "description": "description_value", + "machine_type": "machine_type_value", + "total_disk_count": 1718, + "total_disk_size_gb": 1904, + }, + }, + "data_source_backup_appliance_application": { + "application_name": "application_name_value", + "backup_appliance": "backup_appliance_value", + "appliance_id": 1241, + "type_": "type__value", + "application_id": 1472, + "hostname": "hostname_value", + "host_id": 746, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateDataSourceRequest.meta.fields["data_source"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_source"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] + else: + del request_init["data_source"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_data_source(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_source_rest_required_fields( + request_type=backupvault.UpdateDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_source._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateDataSourceRequest.pb( + backupvault.UpdateDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_data_source(request) + + +def test_update_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.ListBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListBackupsResponse.to_json( + backupvault.ListBackupsResponse() + ) + + request = backupvault.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListBackupsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" 
+ } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.Backup.to_json(backupvault.Backup()) + + request = backupvault.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "enforced_retention_end_time": {}, + "expire_time": {}, + "consistency_time": {}, + "etag": "etag_value", + "state": 1, + "service_locks": [ + { + "lock_until_time": {}, + "backup_appliance_lock_info": { + "backup_appliance_id": 1966, + "backup_appliance_name": "backup_appliance_name_value", + "lock_reason": "lock_reason_value", + "job_name": "job_name_value", + "backup_image": "backup_image_value", + "sla_id": 620, + }, + "service_lock_info": {"operation": "operation_value"}, + } + ], + "backup_appliance_locks": {}, + "compute_instance_backup_properties": { + "description": "description_value", + "tags": {"items": ["items_value1", "items_value2"]}, + "machine_type": "machine_type_value", + "can_ip_forward": True, + "network_interface": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", + "internal_ipv6_prefix_length": 2831, + "name": "name_value", + "access_configs": [ + { + "type_": 1, + "name": "name_value", + "external_ip": "external_ip_value", + "external_ipv6": 
"external_ipv6_value", + "external_ipv6_prefix_length": 2837, + "set_public_ptr": True, + "public_ptr_domain_name": "public_ptr_domain_name_value", + "network_tier": 1, + } + ], + "ipv6_access_configs": {}, + "alias_ip_ranges": [ + { + "ip_cidr_range": "ip_cidr_range_value", + "subnetwork_range_name": "subnetwork_range_name_value", + } + ], + "stack_type": 1, + "ipv6_access_type": 1, + "queue_count": 1197, + "nic_type": 1, + "network_attachment": "network_attachment_value", + } + ], + "disk": [ + { + "initialize_params": { + "disk_name": "disk_name_value", + "replica_zones": [ + "replica_zones_value1", + "replica_zones_value2", + ], + }, + "device_name": "device_name_value", + "kind": "kind_value", + "disk_type_deprecated": 1, + "mode": 1, + "source": "source_value", + "index": 536, + "boot": True, + "auto_delete": True, + "license_": ["license__value1", "license__value2"], + "disk_interface": 1, + "guest_os_feature": [{"type_": 1}], + "disk_encryption_key": { + "raw_key": "raw_key_value", + "rsa_encrypted_key": "rsa_encrypted_key_value", + "kms_key_name": "kms_key_name_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "disk_size_gb": 1261, + "saved_state": 1, + "disk_type": "disk_type_value", + "type_": 1, + } + ], + "metadata": {"items": [{"key": "key_value", "value": "value_value"}]}, + "service_account": [ + {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} + ], + "scheduling": { + "on_host_maintenance": 1, + "automatic_restart": True, + "preemptible": True, + "node_affinities": [ + { + "key": "key_value", + "operator": 1, + "values": ["values_value1", "values_value2"], + } + ], + "min_node_cpus": 1379, + "provisioning_model": 1, + "instance_termination_action": 1, + "local_ssd_recovery_timeout": {"seconds": 751, "nanos": 543}, + }, + "guest_accelerator": [ + { + "accelerator_type": "accelerator_type_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "key_revocation_action_type": 1, + "source_instance": "source_instance_value", + "labels": {}, + }, + "backup_appliance_backup_properties": { + "generation_id": 1368, + "finalize_time": {}, + "recovery_range_start_time": {}, + "recovery_range_end_time": {}, + }, + "backup_type": 1, + "gcp_backup_plan_info": { + "backup_plan": "backup_plan_value", + "backup_plan_rule_id": "backup_plan_rule_id_value", + }, + "resource_size_bytes": 2056, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_rest_required_fields( + request_type=backupvault.UpdateBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "backup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupRequest.pb( + backupvault.UpdateBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup(request) + + +def test_update_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_rest_required_fields( + request_type=backupvault.DeleteBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.DeleteBackupRequest.pb( + backupvault.DeleteBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.DeleteBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.DeleteBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup(request) + + +def test_delete_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +def test_delete_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.RestoreBackupRequest, + dict, + ], +) +def test_restore_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restore_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + + request = {} + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_backup_rest_required_fields( + request_type=backupvault.RestoreBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_restore_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_restore_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.RestoreBackupRequest.pb( + backupvault.RestoreBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.RestoreBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.RestoreBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_backup(request) + + +def test_restore_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restore_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore" + % client.transport._host, + args[1], + ) + + +def test_restore_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +def test_restore_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.CreateBackupPlanRequest, + dict, + ], +) +def test_create_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_plan"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_rules": [ + { + "rule_id": "rule_id_value", + "backup_retention_days": 2237, + "standard_schedule": { + "recurrence_type": 1, + "hourly_frequency": 1748, + "days_of_week": [1], + "days_of_month": [1387, 1388], + "week_day_of_month": {"week_of_month": 1, "day_of_week": 1}, + "months": [1], + "backup_window": { + "start_hour_of_day": 1820, + "end_hour_of_day": 1573, + }, + "time_zone": "time_zone_value", + }, + } + ], + "state": 1, + "resource_type": "resource_type_value", + "etag": "etag_value", + "backup_vault": "backup_vault_value", + "backup_vault_service_account": "backup_vault_service_account_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplan.CreateBackupPlanRequest.meta.fields["backup_plan"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_plan"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan"][field])): + del request_init["backup_plan"][field][i][subfield] + else: + del request_init["backup_plan"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_plan(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc + + request = {} + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_rest_required_fields( + request_type=backupplan.CreateBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == request_init["backup_plan_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanId"] = "backup_plan_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_plan_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == "backup_plan_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_plan(request) + + expected_params = [ + ( + "backupPlanId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupPlanId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanId", + "backupPlan", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.CreateBackupPlanRequest.pb( + backupplan.CreateBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplan.CreateBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.CreateBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_plan(request) + + +def test_create_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +def test_create_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.GetBackupPlanRequest, + dict, + ], +) +def test_get_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_plan(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_rest_required_fields( + request_type=backupplan.GetBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_plan(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.GetBackupPlanRequest.pb( + backupplan.GetBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupplan.BackupPlan.to_json( + backupplan.BackupPlan() + ) + + request = backupplan.GetBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupplan.BackupPlan() + + client.get_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.GetBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup_plan(request) + + +def test_get_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +def test_get_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backup_plans(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plans_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + + request = {} + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_plans_rest_required_fields( + request_type=backupplan.ListBackupPlansRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backup_plans(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_plans_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_plans._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_plans_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_plans" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backup_plans" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.ListBackupPlansRequest.pb( + backupplan.ListBackupPlansRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupplan.ListBackupPlansResponse.to_json( + backupplan.ListBackupPlansResponse() + ) + + request = backupplan.ListBackupPlansRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupplan.ListBackupPlansResponse() + + client.list_backup_plans( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_plans_rest_bad_request( + transport: str = "rest", request_type=backupplan.ListBackupPlansRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
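Aside: one way to read the set arithmetic in the `*_unset_required_fields` assertions is that the left operand lists the fields that travel in the query string, the right operand lists the proto-required fields, and their intersection is what `_get_unset_required_fields({})` is expected to report: required fields that must still be serialized as query parameters even when empty. A short worked example using the field names from this section:

# ListBackupPlans: the query parameters do not overlap the required field
# ("parent" is a path parameter), so the expected result is empty.
assert {"filter", "orderBy", "pageSize", "pageToken"} & {"parent"} == set()

# DeleteBackupPlan: same story with its single query parameter.
assert {"requestId"} & {"name"} == set()

# CreateBackupPlanAssociation: backupPlanAssociationId is both required and a
# query parameter, so it is the one field the transport must send even when it
# is left at its default value.
assert {"backupPlanAssociationId", "requestId"} & {
    "parent",
    "backupPlanAssociationId",
    "backupPlanAssociation",
} == {"backupPlanAssociationId"}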
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backup_plans(request) + + +def test_list_backup_plans_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backup_plans(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) + + +def test_list_backup_plans_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
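Aside: the `*_rest_bad_request` tests only need the transport to turn an HTTP 400 into `google.api_core.exceptions.BadRequest`; the response body is irrelevant. The sketch below reproduces that status-to-exception mapping with a hypothetical `raise_for_api_error` helper and a local `BadRequest` class, so it stays runnable without google-api-core installed.

from unittest import mock

import pytest
from requests import Response, Session


class BadRequest(Exception):
    """Local stand-in for google.api_core.exceptions.BadRequest."""


def raise_for_api_error(resp: Response) -> None:
    # Hypothetical mapping layer: HTTP 400 becomes a typed exception.
    if resp.status_code == 400:
        raise BadRequest(f"HTTP {resp.status_code}")


def call_api() -> Response:
    resp = Session().request("GET", "https://backupdr.example.com/v1/whatever")
    raise_for_api_error(resp)
    return resp


def test_bad_request_surfaces_as_exception():
    canned = Response()
    canned.status_code = 400

    with mock.patch.object(Session, "request", return_value=canned), pytest.raises(
        BadRequest
    ):
        call_api()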
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupplan.ListBackupPlansResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_plans(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + pages = list(client.list_backup_plans(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.DeleteBackupPlanRequest, + dict, + ], +) +def test_delete_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup_plan(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc + + request = {} + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_plan_rest_required_fields( + request_type=backupplan.DeleteBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
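Aside: the ListBackupPlans pager test above stitches four canned pages together and expects six flattened results plus the page tokens "abc", "def", "ghi", "". The generic sketch below shows the token loop that a pager such as `pagers.ListBackupPlansPager` implements; `Page` and `iterate_all` are illustrative names, not the real pager API.

from typing import Callable, Iterator, List


class Page:
    def __init__(self, items: List[str], next_page_token: str):
        self.items = items
        self.next_page_token = next_page_token


def iterate_all(fetch_page: Callable[[str], Page]) -> Iterator[str]:
    # Keep requesting pages until the server returns an empty token.
    token = ""
    while True:
        page = fetch_page(token)
        yield from page.items
        token = page.next_page_token
        if not token:
            return


# Pages shaped like the canned ListBackupPlansResponse series above:
# 3 + 0 + 1 + 2 items across tokens "abc", "def", "ghi", "".
pages = {
    "": Page(["p1", "p2", "p3"], "abc"),
    "abc": Page([], "def"),
    "def": Page(["p4"], "ghi"),
    "ghi": Page(["p5", "p6"], ""),
}

assert list(iterate_all(lambda token: pages[token])) == [
    "p1", "p2", "p3", "p4", "p5", "p6",
]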
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup_plan(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.DeleteBackupPlanRequest.pb( + backupplan.DeleteBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplan.DeleteBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
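Aside: the `*_rest_interceptors` tests check that a configured `BackupDRRestInterceptor` has its `pre_*` hook called with (request, metadata) before the call goes out and its `post_*` hook called with the decoded response before the client returns it. A minimal sketch of that pre/post pattern follows; the toy `Interceptor` and `call_with_interceptor` are assumptions of this note, not the generated interceptor interface.

from unittest import mock


class Interceptor:
    def pre_delete_backup_plan(self, request, metadata):
        # May rewrite the request or metadata before it is sent.
        return request, metadata

    def post_delete_backup_plan(self, response):
        # May inspect or replace the response before the client returns it.
        return response


def call_with_interceptor(interceptor, request, metadata, send):
    request, metadata = interceptor.pre_delete_backup_plan(request, metadata)
    raw_response = send(request, metadata)
    return interceptor.post_delete_backup_plan(raw_response)


def test_pre_and_post_hooks_fire_once():
    interceptor = Interceptor()
    with mock.patch.object(
        Interceptor,
        "pre_delete_backup_plan",
        return_value=({"name": "bp"}, [("k", "v")]),
    ) as pre, mock.patch.object(
        Interceptor, "post_delete_backup_plan", return_value={"done": True}
    ) as post:
        result = call_with_interceptor(
            interceptor,
            {"name": "bp"},
            [("k", "v")],
            send=lambda req, md: {"done": False},
        )

    pre.assert_called_once()
    post.assert_called_once()
    assert result == {"done": True}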
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_plan(request) + + +def test_delete_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +def test_delete_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_plan_association"] = { + "name": "name_value", + "resource_type": "resource_type_value", + "resource": "resource_value", + "backup_plan": "backup_plan_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "rules_config_info": [ + { + "rule_id": "rule_id_value", + "last_backup_state": 1, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "last_successful_backup_consistency_time": {}, + } + ], + "data_source": "data_source_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplanassociation.CreateBackupPlanAssociationRequest.meta.fields[ + "backup_plan_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backup_plan_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan_association"][field])): + del request_init["backup_plan_association"][field][i][subfield] + else: + del request_init["backup_plan_association"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_plan_association(request) + + # Establish that the response is the type that we expect. 
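Aside: the pruning block above guards against version skew between the protobuf runtime at test time and the version the sample dict was generated from, by deleting any nested keys the runtime message no longer knows about. The standalone sketch below applies the same idea to plain dicts; `prune_unknown_subfields` and the schema literal are illustrative assumptions, not the generated helper.

def prune_unknown_subfields(sample: dict, known: dict) -> dict:
    """Drop nested keys of `sample` that the runtime schema `known` lacks."""
    for field, value in list(sample.items()):
        allowed = known.get(field)
        if isinstance(value, dict) and isinstance(allowed, dict):
            for subfield in list(value):
                if subfield not in allowed:
                    del value[subfield]
        elif isinstance(value, list) and isinstance(allowed, dict):
            for item in value:
                if isinstance(item, dict):
                    for subfield in list(item):
                        if subfield not in allowed:
                            del item[subfield]
    return sample


# The runtime "knows" rule_id and last_backup_state but not a newer field.
schema = {"rules_config_info": {"rule_id": str, "last_backup_state": int}}
sample = {
    "rules_config_info": [
        {"rule_id": "rule_id_value", "last_backup_state": 1, "added_in_newer_proto": "x"}
    ]
}

prune_unknown_subfields(sample, schema)
assert sample == {
    "rules_config_info": [{"rule_id": "rule_id_value", "last_backup_state": 1}]
}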
+ assert response.operation.name == "operations/spam" + + +def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_association_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanAssociationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == request_init["backup_plan_association_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanAssociationId"] = "backup_plan_association_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_plan_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == "backup_plan_association_id_value" + ) + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
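Aside: the required-fields test above compares `request_init["backup_plan_association_id"]` (the proto field name) against `jsonified_request["backupPlanAssociationId"]` (the JSON name produced by `json_format.MessageToJson`). The proto3 JSON mapping lowerCamelCases field names, which a tiny helper can mimic for illustration; `to_json_name` is an assumption of this note, not a protobuf API.

def to_json_name(proto_field_name: str) -> str:
    # Mimics the proto3 JSON mapping: snake_case -> lowerCamelCase.
    first, *rest = proto_field_name.split("_")
    return first + "".join(part.capitalize() for part in rest)


assert to_json_name("backup_plan_association_id") == "backupPlanAssociationId"
assert to_json_name("page_token") == "pageToken"
assert to_json_name("parent") == "parent"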
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_plan_association(request) + + expected_params = [ + ( + "backupPlanAssociationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "backupPlanAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanAssociationId", + "backupPlanAssociation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_plan_association_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan_association" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_plan_association" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + backupplanassociation.CreateBackupPlanAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplanassociation.CreateBackupPlanAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_plan_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_plan_association_rest_bad_request( + transport: 
str = "rest", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_plan_association(request) + + +def test_create_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
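Aside: the `*_flattened_error` tests pin down a client-surface rule: a call may pass either a fully formed request object or individual flattened fields, never both. A small sketch of that guard follows; the `create_backup_plan_association` wrapper here is a hypothetical stand-in, not the generated method.

from typing import Optional

import pytest


def create_backup_plan_association(
    request: Optional[dict] = None, *, parent: Optional[str] = None
):
    # Reject mixing a request object with flattened fields.
    if request is not None and parent is not None:
        raise ValueError(
            "If the `request` argument is set, then none of the individual "
            "field arguments should be set."
        )
    return request if request is not None else {"parent": parent}


def test_request_object_and_flattened_fields_are_mutually_exclusive():
    with pytest.raises(ValueError):
        create_backup_plan_association(
            {"parent": "projects/p/locations/l"}, parent="projects/p/locations/l"
        )

    # Either form alone is fine.
    assert create_backup_plan_association(parent="projects/p/locations/l") == {
        "parent": "projects/p/locations/l"
    }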
+ with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_plan_association(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.BackupPlanAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
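Aside: `path_template.transcode` is the step that turns a request message plus the method's HTTP options into the pieces the session needs: a "uri", an HTTP "method", "query_params", and, for methods with bodies, a "body". The tests stub it with a literal dict; the sketch below builds such a dict by hand for a GET. `fake_transcode` is an assumption of this note, not the real transcoder.

def fake_transcode(name: str, page_size: int) -> dict:
    # Shape mirrors what the generated tests assign to transcode.return_value:
    # anything not bound into the URI path stays behind as a query param.
    return {
        "uri": f"v1/{name}",
        "method": "get",
        "query_params": {"pageSize": page_size},
    }


result = fake_transcode("projects/sample1/locations/sample2/backupPlans/sample3", 25)
assert result["method"] == "get"
assert result["uri"].startswith("v1/projects/")
assert "body" not in result  # GETs carry no body; the POST tests add one explicitly.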
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_plan_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_plan_association_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), ) client = BackupDRClient(transport=transport) @@ -3105,14 +21997,14 @@ def test_list_management_servers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_management_servers" + transports.BackupDRRestInterceptor, "post_get_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_management_servers" + transports.BackupDRRestInterceptor, "pre_get_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.ListManagementServersRequest.pb( - backupdr.ListManagementServersRequest() + pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb( + backupplanassociation.GetBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3124,19 +22016,19 @@ def test_list_management_servers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ListManagementServersResponse.to_json( - backupdr.ListManagementServersResponse() + req.return_value._content = backupplanassociation.BackupPlanAssociation.to_json( + backupplanassociation.BackupPlanAssociation() ) - request = backupdr.ListManagementServersRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ListManagementServersResponse() + post.return_value = backupplanassociation.BackupPlanAssociation() - client.list_management_servers( + client.get_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3148,8 +22040,9 @@ def test_list_management_servers_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_management_servers_rest_bad_request( - transport: str = "rest", request_type=backupdr.ListManagementServersRequest +def test_get_backup_plan_association_rest_bad_request( + transport: str = "rest", + 
request_type=backupplanassociation.GetBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3157,7 +22050,9 @@ def test_list_management_servers_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3169,10 +22064,10 @@ def test_list_management_servers_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_management_servers(request) + client.get_backup_plan_association(request) -def test_list_management_servers_rest_flattened(): +def test_get_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3181,14 +22076,16 @@ def test_list_management_servers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() + return_value = backupplanassociation.BackupPlanAssociation() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -3196,159 +22093,90 @@ def test_list_management_servers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_management_servers(**mock_args) + client.get_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_list_management_servers_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_management_servers( - backupdr.ListManagementServersRequest(), - parent="parent_value", - ) - - -def test_list_management_servers_rest_pager(transport: str = "rest"): +def test_get_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token="abc", - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token="def", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token="ghi", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupdr.ListManagementServersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_management_servers(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupdr.ManagementServer) for i in results) - pages = list(client.list_management_servers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - backupdr.GetManagementServerRequest, + backupplanassociation.ListBackupPlanAssociationsRequest, dict, ], ) -def test_get_management_server_rest(request_type): +def test_list_backup_plan_associations_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ManagementServer( - name="name_value", - description="description_value", - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag="etag_value", - oauth2_client_id="oauth2_client_id_value", - ba_proxy_uri=["ba_proxy_uri_value"], - satisfies_pzi=True, + return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) # Establish that the response is the type that we expect. - assert isinstance(response, backupdr.ManagementServer) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == "etag_value" - assert response.oauth2_client_id == "oauth2_client_id_value" - assert response.ba_proxy_uri == ["ba_proxy_uri_value"] - assert response.satisfies_pzi is True + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_management_server_rest_use_cached_wrapped_rpc(): +def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3363,7 +22191,7 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_management_server + client._transport.list_backup_plan_associations in client._transport._wrapped_methods ) @@ -3373,29 +22201,29 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_management_server + client._transport.list_backup_plan_associations ] = mock_rpc request = {} - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_management_server_rest_required_fields( - request_type=backupdr.GetManagementServerRequest, +def test_list_backup_plan_associations_rest_required_fields( + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3406,21 +22234,29 @@ def test_get_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3429,7 +22265,7 @@ def test_get_management_server_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3450,30 +22286,43 @@ def test_get_management_server_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_management_server_rest_unset_required_fields(): +def test_list_backup_plan_associations_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_backup_plan_associations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_management_server_rest_interceptors(null_interceptor): +def test_list_backup_plan_associations_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3484,14 +22333,14 @@ def test_get_management_server_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_management_server" + transports.BackupDRRestInterceptor, "post_list_backup_plan_associations" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_management_server" + transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.GetManagementServerRequest.pb( - backupdr.GetManagementServerRequest() + pb_message = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + backupplanassociation.ListBackupPlanAssociationsRequest() ) transcode.return_value = { "method": "post", @@ -3503,19 +22352,21 @@ def test_get_management_server_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ManagementServer.to_json( - backupdr.ManagementServer() + req.return_value._content = ( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) ) - request = backupdr.GetManagementServerRequest() + request = backupplanassociation.ListBackupPlanAssociationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ManagementServer() + post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.get_management_server( + client.list_backup_plan_associations( request, metadata=[ ("key", "val"), @@ -3527,8 
+22378,9 @@ def test_get_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.GetManagementServerRequest +def test_list_backup_plan_associations_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3536,9 +22388,7 @@ def test_get_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3550,10 +22400,10 @@ def test_get_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_management_server(request) + client.list_backup_plan_associations(request) -def test_get_management_server_rest_flattened(): +def test_list_backup_plan_associations_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3562,16 +22412,14 @@ def test_get_management_server_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -3579,25 +22427,27 @@ def test_get_management_server_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_management_server(**mock_args) + client.list_backup_plan_associations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" % client.transport._host, args[1], ) -def test_get_management_server_rest_flattened_error(transport: str = "rest"): +def test_list_backup_plan_associations_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3606,124 +22456,95 @@ def test_get_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_management_server( - backupdr.GetManagementServerRequest(), - name="name_value", + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", ) -def test_get_management_server_rest_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - backupdr.CreateManagementServerRequest, - dict, - ], -) -def test_create_management_server_rest(request_type): +def test_list_backup_plan_associations_rest_pager(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["management_server"] = { - "name": "name_value", - "description": "description_value", - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "type_": 1, - "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, - "workforce_identity_based_management_uri": { - "first_party_management_uri": "first_party_management_uri_value", - "third_party_management_uri": "third_party_management_uri_value", - }, - "state": 1, - "networks": [{"network": "network_value", "peering_mode": 1}], - "etag": "etag_value", - "oauth2_client_id": "oauth2_client_id_value", - "workforce_identity_based_oauth2_client_id": { - "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", - "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", - }, - "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], - "satisfies_pzs": {"value": True}, - "satisfies_pzi": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. 
- # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the values into proper Response objs + response = tuple( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = {"parent": "projects/sample1/locations/sample2"} - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_backup_plan_associations(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) - subfields_not_in_runtime = [] + pages = list(client.list_backup_plan_associations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["management_server"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["management_server"][field])): - del request_init["management_server"][field][i][subfield] - else: - del request_init["management_server"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3738,13 +22559,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_management_server_rest_use_cached_wrapped_rpc(): +def test_delete_backup_plan_association_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3759,7 +22580,7 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_management_server + client._transport.delete_backup_plan_association in client._transport._wrapped_methods ) @@ -3769,11 +22590,11 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_management_server + client._transport.delete_backup_plan_association ] = mock_rpc request = {} - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3782,21 +22603,20 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_management_server_rest_required_fields( - request_type=backupdr.CreateManagementServerRequest, +def test_delete_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["parent"] = "" - request_init["management_server_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3804,39 +22624,26 @@ def test_create_management_server_rest_required_fields( ) # verify fields with default values are dropped - assert "managementServerId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "managementServerId" in jsonified_request - assert ( - jsonified_request["managementServerId"] == request_init["management_server_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["managementServerId"] = "management_server_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "management_server_id", - "request_id", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "managementServerId" in jsonified_request - assert jsonified_request["managementServerId"] == "management_server_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3857,10 +22664,9 @@ def test_create_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3870,44 +22676,26 @@ def test_create_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) - expected_params = [ - ( - "managementServerId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_management_server_rest_unset_required_fields(): +def test_delete_backup_plan_association_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_management_server._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "managementServerId", - "requestId", - ) - ) - & set( - ( - "parent", - "managementServerId", - "managementServer", - ) - ) + unset_fields = transport.delete_backup_plan_association._get_unset_required_fields( + {} ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_management_server_rest_interceptors(null_interceptor): +def test_delete_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3920,14 +22708,14 @@ def test_create_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_management_server" + transports.BackupDRRestInterceptor, "post_delete_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_management_server" + transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.CreateManagementServerRequest.pb( - backupdr.CreateManagementServerRequest() + pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + backupplanassociation.DeleteBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3943,7 +22731,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - 
request = backupdr.CreateManagementServerRequest() + request = backupplanassociation.DeleteBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3951,7 +22739,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_management_server( + client.delete_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3963,8 +22751,9 @@ def test_create_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +def test_delete_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3972,7 +22761,9 @@ def test_create_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3984,10 +22775,10 @@ def test_create_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_management_server(request) + client.delete_backup_plan_association(request) -def test_create_management_server_rest_flattened(): +def test_delete_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3999,13 +22790,13 @@ def test_create_management_server_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -4016,20 +22807,20 @@ def test_create_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_management_server(**mock_args) + client.delete_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_create_management_server_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4038,15 +22829,13 @@ def test_create_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_management_server( - backupdr.CreateManagementServerRequest(), - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) -def test_create_management_server_rest_error(): +def test_delete_backup_plan_association_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4055,11 +22844,11 @@ def test_create_management_server_rest_error(): @pytest.mark.parametrize( "request_type", [ - backupdr.DeleteManagementServerRequest, + backupplanassociation.TriggerBackupRequest, dict, ], ) -def test_delete_management_server_rest(request_type): +def test_trigger_backup_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4067,7 +22856,7 @@ def test_delete_management_server_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4083,13 +22872,13 @@ def test_delete_management_server_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_management_server_rest_use_cached_wrapped_rpc(): +def test_trigger_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4103,22 +22892,17 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_management_server - in client._transport._wrapped_methods - ) + assert client._transport.trigger_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_management_server - ] = mock_rpc + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc request = {} - client.delete_management_server(request) + client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -4127,20 +22911,21 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_management_server(request) + client.trigger_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_management_server_rest_required_fields( - request_type=backupdr.DeleteManagementServerRequest, +def test_trigger_backup_rest_required_fields( + request_type=backupplanassociation.TriggerBackupRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} request_init["name"] = "" + request_init["rule_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4151,23 +22936,24 @@ def test_delete_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["ruleId"] = "rule_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "ruleId" in jsonified_request + assert jsonified_request["ruleId"] == "rule_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4188,9 +22974,10 @@ def test_delete_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4200,24 +22987,32 @@ def test_delete_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_management_server_rest_unset_required_fields(): +def test_trigger_backup_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.trigger_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "ruleId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_management_server_rest_interceptors(null_interceptor): +def test_trigger_backup_rest_interceptors(null_interceptor): transport = 
transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -4230,14 +23025,14 @@ def test_delete_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_management_server" + transports.BackupDRRestInterceptor, "post_trigger_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_management_server" + transports.BackupDRRestInterceptor, "pre_trigger_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.DeleteManagementServerRequest.pb( - backupdr.DeleteManagementServerRequest() + pb_message = backupplanassociation.TriggerBackupRequest.pb( + backupplanassociation.TriggerBackupRequest() ) transcode.return_value = { "method": "post", @@ -4253,7 +23048,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backupdr.DeleteManagementServerRequest() + request = backupplanassociation.TriggerBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4261,7 +23056,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_management_server( + client.trigger_backup( request, metadata=[ ("key", "val"), @@ -4273,8 +23068,8 @@ def test_delete_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +def test_trigger_backup_rest_bad_request( + transport: str = "rest", request_type=backupplanassociation.TriggerBackupRequest ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4283,7 +23078,7 @@ def test_delete_management_server_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4296,10 +23091,10 @@ def test_delete_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_management_server(request) + client.trigger_backup(request) -def test_delete_management_server_rest_flattened(): +def test_trigger_backup_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4312,12 +23107,13 @@ def test_delete_management_server_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } # get truthy value for each flattened field mock_args = dict( name="name_value", + rule_id="rule_id_value", ) mock_args.update(sample_request) @@ -4328,20 +23124,20 @@ def test_delete_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_management_server(**mock_args) + client.trigger_backup(**mock_args) # Establish that the underlying call was made with the expected # request object 
values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup" % client.transport._host, args[1], ) -def test_delete_management_server_rest_flattened_error(transport: str = "rest"): +def test_trigger_backup_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4350,13 +23146,14 @@ def test_delete_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_management_server( - backupdr.DeleteManagementServerRequest(), + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), name="name_value", + rule_id="rule_id_value", ) -def test_delete_management_server_rest_error(): +def test_trigger_backup_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4505,6 +23302,29 @@ def test_backup_dr_base_transport(): "get_management_server", "create_management_server", "delete_management_server", + "create_backup_vault", + "list_backup_vaults", + "fetch_usable_backup_vaults", + "get_backup_vault", + "update_backup_vault", + "delete_backup_vault", + "list_data_sources", + "get_data_source", + "update_data_source", + "list_backups", + "get_backup", + "update_backup", + "delete_backup", + "restore_backup", + "create_backup_plan", + "get_backup_plan", + "list_backup_plans", + "delete_backup_plan", + "create_backup_plan_association", + "get_backup_plan_association", + "list_backup_plan_associations", + "delete_backup_plan_association", + "trigger_backup", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -4803,6 +23623,75 @@ def test_backup_dr_client_transport_session_collision(transport_name): session1 = client1.transport.delete_management_server._session session2 = client2.transport.delete_management_server._session assert session1 != session2 + session1 = client1.transport.create_backup_vault._session + session2 = client2.transport.create_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_backup_vaults._session + session2 = client2.transport.list_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.fetch_usable_backup_vaults._session + session2 = client2.transport.fetch_usable_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.get_backup_vault._session + session2 = client2.transport.get_backup_vault._session + assert session1 != session2 + session1 = client1.transport.update_backup_vault._session + session2 = client2.transport.update_backup_vault._session + assert session1 != session2 + session1 = client1.transport.delete_backup_vault._session + session2 = client2.transport.delete_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_data_sources._session + session2 = client2.transport.list_data_sources._session + assert session1 != session2 + session1 = client1.transport.get_data_source._session + session2 = client2.transport.get_data_source._session + assert session1 != session2 + session1 = client1.transport.update_data_source._session + session2 = client2.transport.update_data_source._session + assert session1 != session2 + session1 = client1.transport.list_backups._session 
+ session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.update_backup._session + session2 = client2.transport.update_backup._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.restore_backup._session + session2 = client2.transport.restore_backup._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan._session + session2 = client2.transport.create_backup_plan._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan._session + session2 = client2.transport.get_backup_plan._session + assert session1 != session2 + session1 = client1.transport.list_backup_plans._session + session2 = client2.transport.list_backup_plans._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan._session + session2 = client2.transport.delete_backup_plan._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan_association._session + session2 = client2.transport.create_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan_association._session + session2 = client2.transport.get_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.list_backup_plan_associations._session + session2 = client2.transport.list_backup_plan_associations._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan_association._session + session2 = client2.transport.delete_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.trigger_backup._session + session2 = client2.transport.trigger_backup._session + assert session1 != session2 def test_backup_dr_grpc_transport_channel(): @@ -4957,6 +23846,153 @@ def test_backup_dr_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_backup_path(): + project = "squid" + location = "clam" + backupvault = "whelk" + datasource = "octopus" + backup = "oyster" + expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + backup=backup, + ) + actual = BackupDRClient.backup_path( + project, location, backupvault, datasource, backup + ) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "backupvault": "mussel", + "datasource": "winkle", + "backup": "nautilus", + } + path = BackupDRClient.backup_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_backup_path(path) + assert expected == actual + + +def test_backup_plan_path(): + project = "scallop" + location = "abalone" + backup_plan = "squid" + expected = ( + "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format( + project=project, + location=location, + backup_plan=backup_plan, + ) + ) + actual = BackupDRClient.backup_plan_path(project, location, backup_plan) + assert expected == actual + + +def test_parse_backup_plan_path(): + expected = { + "project": "clam", + "location": "whelk", + "backup_plan": "octopus", + } + path = BackupDRClient.backup_plan_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_plan_path(path) + assert expected == actual + + +def test_backup_plan_association_path(): + project = "oyster" + location = "nudibranch" + backup_plan_association = "cuttlefish" + expected = "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format( + project=project, + location=location, + backup_plan_association=backup_plan_association, + ) + actual = BackupDRClient.backup_plan_association_path( + project, location, backup_plan_association + ) + assert expected == actual + + +def test_parse_backup_plan_association_path(): + expected = { + "project": "mussel", + "location": "winkle", + "backup_plan_association": "nautilus", + } + path = BackupDRClient.backup_plan_association_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_plan_association_path(path) + assert expected == actual + + +def test_backup_vault_path(): + project = "scallop" + location = "abalone" + backupvault = "squid" + expected = ( + "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( + project=project, + location=location, + backupvault=backupvault, + ) + ) + actual = BackupDRClient.backup_vault_path(project, location, backupvault) + assert expected == actual + + +def test_parse_backup_vault_path(): + expected = { + "project": "clam", + "location": "whelk", + "backupvault": "octopus", + } + path = BackupDRClient.backup_vault_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_vault_path(path) + assert expected == actual + + +def test_data_source_path(): + project = "oyster" + location = "nudibranch" + backupvault = "cuttlefish" + datasource = "mussel" + expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + ) + actual = BackupDRClient.data_source_path(project, location, backupvault, datasource) + assert expected == actual + + +def test_parse_data_source_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "backupvault": "scallop", + "datasource": "abalone", + } + path = BackupDRClient.data_source_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_data_source_path(path) + assert expected == actual + + def test_management_server_path(): project = "squid" location = "clam" From e4ac435aaa9508e33090091232ff35df860bfd37 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:48:29 -0400 Subject: [PATCH 104/108] feat: [google-cloud-contact-center-insights] Add import / export IssueModel (#13132) - [ ] Regenerate this pull request now. 
BEGIN_COMMIT_OVERRIDE feat: Add CMEK InitializeLroSpec feat: Add metadata import to IngestConversations feat: Add sampling to IngestConversations docs: Add a comment for valid `order_by` values in ListConversations docs: Add a comment for valid `update_mask` values in UpdateConversation feat: Add import / export IssueModel END_COMMIT_OVERRIDE PiperOrigin-RevId: 683188578 Source-Link: https://github.com/googleapis/googleapis/commit/d0eeab38c8f11c090f05c332f2374b556ae36644 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4d5b300a7249ce24278fbe77c16983a06d6e4a5d Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbnRhY3QtY2VudGVyLWluc2lnaHRzLy5Pd2xCb3QueWFtbCIsImgiOiI0ZDViMzAwYTcyNDljZTI0Mjc4ZmJlNzdjMTY5ODNhMDZkNmU0YTVkIn0= --------- Co-authored-by: Owl Bot --- .../cloud/contact_center_insights/__init__.py | 24 + .../contact_center_insights_v1/__init__.py | 24 + .../gapic_metadata.json | 60 + .../contact_center_insights/async_client.py | 548 +- .../contact_center_insights/client.py | 552 +- .../transports/base.py | 56 + .../transports/grpc.py | 129 +- .../transports/grpc_asyncio.py | 152 +- .../transports/rest.py | 570 +- .../types/__init__.py | 24 + .../types/contact_center_insights.py | 342 +- .../types/resources.py | 175 +- ...enter_insights_export_issue_model_async.py | 60 + ...center_insights_export_issue_model_sync.py | 60 + ...nter_insights_get_encryption_spec_async.py | 52 + ...enter_insights_get_encryption_spec_sync.py | 52 + ...enter_insights_import_issue_model_async.py | 60 + ...center_insights_import_issue_model_sync.py | 60 + ...sights_initialize_encryption_spec_async.py | 59 + ...nsights_initialize_encryption_spec_sync.py | 59 + ...google.cloud.contactcenterinsights.v1.json | 966 +- ...xup_contact_center_insights_v1_keywords.py | 8 +- .../test_contact_center_insights.py | 14094 ++++++++++------ 23 files changed, 12380 insertions(+), 5806 deletions(-) create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py create mode 100644 packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py 
index f19179e38c46..13964e6e628e 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py @@ -56,16 +56,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, ListConversationsRequest, @@ -101,11 +111,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -170,16 +182,26 @@ "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "ListAnalysesRequest", "ListAnalysesResponse", "ListConversationsRequest", @@ -214,11 +236,13 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py index 5f169fc674d2..8548409a4056 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py @@ -54,16 +54,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, ListConversationsRequest, @@ -99,11 +109,13 @@ Conversation, 
ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -158,6 +170,7 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "ConversationView", @@ -181,25 +194,36 @@ "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "FaqAnswerData", "GcsSource", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", "HoldData", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "Intent", "IntentMatchData", "InterruptionData", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json index af5c8ce82e8f..0e9a96c732d4 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json @@ -95,6 +95,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -105,6 +110,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -130,11 +140,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" @@ -295,6 +315,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -305,6 +330,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -330,11 +360,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" @@ -495,6 +535,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -505,6 +550,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -530,11 +580,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + 
"import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py index 3e0bb0884cb2..e792496b0905 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py @@ -89,6 +89,12 @@ class ContactCenterInsightsAsyncClient: parse_conversation_profile_path = staticmethod( ContactCenterInsightsClient.parse_conversation_profile_path ) + encryption_spec_path = staticmethod( + ContactCenterInsightsClient.encryption_spec_path + ) + parse_encryption_spec_path = staticmethod( + ContactCenterInsightsClient.parse_encryption_spec_path + ) issue_path = staticmethod(ContactCenterInsightsClient.issue_path) parse_issue_path = staticmethod(ContactCenterInsightsClient.parse_issue_path) issue_model_path = staticmethod(ContactCenterInsightsClient.issue_model_path) @@ -320,7 +326,9 @@ async def create_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.Conversation: - r"""Creates a conversation. + r"""Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. .. code-block:: python @@ -448,9 +456,9 @@ async def upload_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + r"""Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. .. code-block:: python @@ -590,7 +598,21 @@ async def sample_update_conversation(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. + The list of fields to be updated. All possible fields + can be updated by passing ``*``, or a subset of the + following updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2801,6 +2823,256 @@ async def sample_undeploy_issue_model(): # Done; return the response. 
return response + async def export_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ExportIssueModelRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports an issue model to the provided destination. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest, dict]]): + The request object. Request to export an issue model. + name (:class:`str`): + Required. The issue model to export. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ExportIssueModelResponse` + Response from export issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ExportIssueModelRequest): + request = contact_center_insights.ExportIssueModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_issue_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.ExportIssueModelResponse, + metadata_type=contact_center_insights.ExportIssueModelMetadata, + ) + + # Done; return the response. + return response + + async def import_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ImportIssueModelRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports an issue model from a Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest, dict]]): + The request object. Request to import an issue model. + parent (:class:`str`): + Required. The parent resource of the + issue model. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ImportIssueModelResponse` + Response from import issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ImportIssueModelRequest): + request = contact_center_insights.ImportIssueModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_issue_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.ImportIssueModelResponse, + metadata_type=contact_center_insights.ImportIssueModelMetadata, + ) + + # Done; return the response. + return response + async def get_issue( self, request: Optional[Union[contact_center_insights.GetIssueRequest, dict]] = None, @@ -4061,7 +4333,13 @@ async def sample_get_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4172,7 +4450,13 @@ async def sample_update_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4224,6 +4508,256 @@ async def sample_update_settings(): # Done; return the response. return response + async def get_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.GetEncryptionSpecRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Gets location-level encryption key specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest, dict]]): + The request object. The request to get location-level + encryption specification. + name (:class:`str`): + Required. The name of the encryption + spec resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.contact_center_insights_v1.types.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.GetEncryptionSpecRequest): + request = contact_center_insights.GetEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def initialize_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.InitializeEncryptionSpecRequest, dict] + ] = None, + *, + encryption_spec: Optional[resources.EncryptionSpec] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest, dict]]): + The request object. The request to initialize a + location-level encryption specification. + encryption_spec (:class:`google.cloud.contact_center_insights_v1.types.EncryptionSpec`): + Required. The encryption spec used for CMEK encryption. + It is required that the kms key is in the same region as + the endpoint. The same key will be used for all + provisioned resources, if encryption is available. If + the kms_key_name is left empty, no encryption will be + enforced. + + This corresponds to the ``encryption_spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecResponse` + The response to initialize a location-level encryption + specification. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([encryption_spec]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, contact_center_insights.InitializeEncryptionSpecRequest + ): + request = contact_center_insights.InitializeEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if encryption_spec is not None: + request.encryption_spec = encryption_spec + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.initialize_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("encryption_spec.name", request.encryption_spec.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.InitializeEncryptionSpecResponse, + metadata_type=contact_center_insights.InitializeEncryptionSpecMetadata, + ) + + # Done; return the response. + return response + async def create_view( self, request: Optional[ diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index 5a264ad26d88..a81a8b69ce6f 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -266,6 +266,26 @@ def parse_conversation_profile_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def encryption_spec_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified encryption_spec string.""" + return "projects/{project}/locations/{location}/encryptionSpec".format( + project=project, + location=location, + ) + + @staticmethod + def parse_encryption_spec_path(path: str) -> Dict[str, str]: + """Parses a encryption_spec path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/encryptionSpec$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def issue_path( project: str, @@ -915,7 +935,9 @@ def create_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.Conversation: - r"""Creates a conversation. + r"""Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. .. 
code-block:: python @@ -1040,9 +1062,9 @@ def upload_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + r"""Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. .. code-block:: python @@ -1180,7 +1202,21 @@ def sample_update_conversation(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. + The list of fields to be updated. All possible fields + can be updated by passing ``*``, or a subset of the + following updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -3338,6 +3374,250 @@ def sample_undeploy_issue_model(): # Done; return the response. return response + def export_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ExportIssueModelRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports an issue model to the provided destination. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest, dict]): + The request object. Request to export an issue model. + name (str): + Required. The issue model to export. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ExportIssueModelResponse` + Response from export issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ExportIssueModelRequest): + request = contact_center_insights.ExportIssueModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_issue_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.ExportIssueModelResponse, + metadata_type=contact_center_insights.ExportIssueModelMetadata, + ) + + # Done; return the response. + return response + + def import_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ImportIssueModelRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports an issue model from a Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest, dict]): + The request object. 
Request to import an issue model. + parent (str): + Required. The parent resource of the + issue model. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ImportIssueModelResponse` + Response from import issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ImportIssueModelRequest): + request = contact_center_insights.ImportIssueModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_issue_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.ImportIssueModelResponse, + metadata_type=contact_center_insights.ImportIssueModelMetadata, + ) + + # Done; return the response. + return response + def get_issue( self, request: Optional[Union[contact_center_insights.GetIssueRequest, dict]] = None, @@ -4567,7 +4847,13 @@ def sample_get_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4675,7 +4961,13 @@ def sample_update_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. 
View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4724,6 +5016,252 @@ def sample_update_settings(): # Done; return the response. return response + def get_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.GetEncryptionSpecRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Gets location-level encryption key specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest, dict]): + The request object. The request to get location-level + encryption specification. + name (str): + Required. The name of the encryption + spec resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.contact_center_insights_v1.types.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.GetEncryptionSpecRequest): + request = contact_center_insights.GetEncryptionSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_encryption_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def initialize_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.InitializeEncryptionSpecRequest, dict] + ] = None, + *, + encryption_spec: Optional[resources.EncryptionSpec] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest, dict]): + The request object. The request to initialize a + location-level encryption specification. + encryption_spec (google.cloud.contact_center_insights_v1.types.EncryptionSpec): + Required. The encryption spec used for CMEK encryption. + It is required that the kms key is in the same region as + the endpoint. The same key will be used for all + provisioned resources, if encryption is available. If + the kms_key_name is left empty, no encryption will be + enforced. + + This corresponds to the ``encryption_spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecResponse` + The response to initialize a location-level encryption + specification. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([encryption_spec]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, contact_center_insights.InitializeEncryptionSpecRequest + ): + request = contact_center_insights.InitializeEncryptionSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if encryption_spec is not None: + request.encryption_spec = encryption_spec + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.initialize_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("encryption_spec.name", request.encryption_spec.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.InitializeEncryptionSpecResponse, + metadata_type=contact_center_insights.InitializeEncryptionSpecMetadata, + ) + + # Done; return the response. 
+ return response + def create_view( self, request: Optional[ diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py index 36ab3e540e1a..1ccd673466bf 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py @@ -238,6 +238,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_issue_model: gapic_v1.method.wrap_method( + self.export_issue_model, + default_timeout=None, + client_info=client_info, + ), + self.import_issue_model: gapic_v1.method.wrap_method( + self.import_issue_model, + default_timeout=None, + client_info=client_info, + ), self.get_issue: gapic_v1.method.wrap_method( self.get_issue, default_timeout=None, @@ -303,6 +313,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_encryption_spec: gapic_v1.method.wrap_method( + self.get_encryption_spec, + default_timeout=None, + client_info=client_info, + ), + self.initialize_encryption_spec: gapic_v1.method.wrap_method( + self.initialize_encryption_spec, + default_timeout=None, + client_info=client_info, + ), self.create_view: gapic_v1.method.wrap_method( self.create_view, default_timeout=None, @@ -542,6 +562,24 @@ def undeploy_issue_model( ]: raise NotImplementedError() + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_issue( self, @@ -671,6 +709,24 @@ def update_settings( ]: raise NotImplementedError() + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], + Union[resources.EncryptionSpec, Awaitable[resources.EncryptionSpec]], + ]: + raise NotImplementedError() + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_view( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py index 9028aa2b9559..adb2ad6f700c 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py @@ -263,7 +263,9 @@ def create_conversation( ]: r"""Return a callable for the create conversation method over gRPC. - Creates a conversation. 
+ Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. Returns: Callable[[~.CreateConversationRequest], @@ -291,9 +293,9 @@ def upload_conversation( ]: r"""Return a callable for the upload conversation method over gRPC. - Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. Returns: Callable[[~.UploadConversationRequest], @@ -849,6 +851,62 @@ def undeploy_issue_model( ) return self._stubs["undeploy_issue_model"] + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the export issue model method over gRPC. + + Exports an issue model to the provided destination. + + Returns: + Callable[[~.ExportIssueModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_issue_model" not in self._stubs: + self._stubs["export_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ExportIssueModel", + request_serializer=contact_center_insights.ExportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_issue_model"] + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the import issue model method over gRPC. + + Imports an issue model from a Cloud Storage bucket. + + Returns: + Callable[[~.ImportIssueModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_issue_model" not in self._stubs: + self._stubs["import_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ImportIssueModel", + request_serializer=contact_center_insights.ImportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_issue_model"] + @property def get_issue( self, @@ -1207,6 +1265,69 @@ def update_settings( ) return self._stubs["update_settings"] + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], resources.EncryptionSpec + ]: + r"""Return a callable for the get encryption spec method over gRPC. + + Gets location-level encryption key specification. + + Returns: + Callable[[~.GetEncryptionSpecRequest], + ~.EncryptionSpec]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_encryption_spec" not in self._stubs: + self._stubs["get_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/GetEncryptionSpec", + request_serializer=contact_center_insights.GetEncryptionSpecRequest.serialize, + response_deserializer=resources.EncryptionSpec.deserialize, + ) + return self._stubs["get_encryption_spec"] + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the initialize encryption spec method over gRPC. + + Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + Returns: + Callable[[~.InitializeEncryptionSpecRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "initialize_encryption_spec" not in self._stubs: + self._stubs["initialize_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/InitializeEncryptionSpec", + request_serializer=contact_center_insights.InitializeEncryptionSpecRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_encryption_spec"] + @property def create_view( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py index f04fd8885c78..abfe06f88262 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py @@ -270,7 +270,9 @@ def create_conversation( ]: r"""Return a callable for the create conversation method over gRPC. - Creates a conversation. + Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. Returns: Callable[[~.CreateConversationRequest], @@ -299,9 +301,9 @@ def upload_conversation( ]: r"""Return a callable for the upload conversation method over gRPC. - Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. 
Returns: Callable[[~.UploadConversationRequest], @@ -875,6 +877,64 @@ def undeploy_issue_model( ) return self._stubs["undeploy_issue_model"] + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the export issue model method over gRPC. + + Exports an issue model to the provided destination. + + Returns: + Callable[[~.ExportIssueModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_issue_model" not in self._stubs: + self._stubs["export_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ExportIssueModel", + request_serializer=contact_center_insights.ExportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_issue_model"] + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the import issue model method over gRPC. + + Imports an issue model from a Cloud Storage bucket. + + Returns: + Callable[[~.ImportIssueModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_issue_model" not in self._stubs: + self._stubs["import_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ImportIssueModel", + request_serializer=contact_center_insights.ImportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_issue_model"] + @property def get_issue( self, @@ -1246,6 +1306,70 @@ def update_settings( ) return self._stubs["update_settings"] + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], + Awaitable[resources.EncryptionSpec], + ]: + r"""Return a callable for the get encryption spec method over gRPC. + + Gets location-level encryption key specification. + + Returns: + Callable[[~.GetEncryptionSpecRequest], + Awaitable[~.EncryptionSpec]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_encryption_spec" not in self._stubs: + self._stubs["get_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/GetEncryptionSpec", + request_serializer=contact_center_insights.GetEncryptionSpecRequest.serialize, + response_deserializer=resources.EncryptionSpec.deserialize, + ) + return self._stubs["get_encryption_spec"] + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the initialize encryption spec method over gRPC. + + Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + Returns: + Callable[[~.InitializeEncryptionSpecRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "initialize_encryption_spec" not in self._stubs: + self._stubs["initialize_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/InitializeEncryptionSpec", + request_serializer=contact_center_insights.InitializeEncryptionSpecRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_encryption_spec"] + @property def create_view( self, @@ -1493,6 +1617,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_issue_model: gapic_v1.method_async.wrap_method( + self.export_issue_model, + default_timeout=None, + client_info=client_info, + ), + self.import_issue_model: gapic_v1.method_async.wrap_method( + self.import_issue_model, + default_timeout=None, + client_info=client_info, + ), self.get_issue: gapic_v1.method_async.wrap_method( self.get_issue, default_timeout=None, @@ -1558,6 +1692,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_encryption_spec: gapic_v1.method_async.wrap_method( + self.get_encryption_spec, + default_timeout=None, + client_info=client_info, + ), + self.initialize_encryption_spec: gapic_v1.method_async.wrap_method( + self.initialize_encryption_spec, + default_timeout=None, + client_info=client_info, + ), self.create_view: gapic_v1.method_async.wrap_method( self.create_view, default_timeout=None, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py index ab1260ec3b6c..4c44d1bd3e8a 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py @@ -191,6 +191,14 @@ def post_export_insights_data(self, response): logging.log(f"Received 
response: {response}") return response + def pre_export_issue_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_issue_model(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_analysis(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -207,6 +215,14 @@ def post_get_conversation(self, response): logging.log(f"Received response: {response}") return response + def pre_get_encryption_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_encryption_spec(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_issue(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -247,6 +263,14 @@ def post_get_view(self, response): logging.log(f"Received response: {response}") return response + def pre_import_issue_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_issue_model(self, response): + logging.log(f"Received response: {response}") + return response + def pre_ingest_conversations(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -255,6 +279,14 @@ def post_ingest_conversations(self, response): logging.log(f"Received response: {response}") return response + def pre_initialize_encryption_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_initialize_encryption_spec(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_analyses(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -738,6 +770,31 @@ def post_export_insights_data( """ return response + def pre_export_issue_model( + self, + request: contact_center_insights.ExportIssueModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.ExportIssueModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for export_issue_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_export_issue_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_issue_model + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_get_analysis( self, request: contact_center_insights.GetAnalysisRequest, @@ -784,6 +841,31 @@ def post_get_conversation( """ return response + def pre_get_encryption_spec( + self, + request: contact_center_insights.GetEncryptionSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.GetEncryptionSpecRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_encryption_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. 
+ """ + return request, metadata + + def post_get_encryption_spec( + self, response: resources.EncryptionSpec + ) -> resources.EncryptionSpec: + """Post-rpc interceptor for get_encryption_spec + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_get_issue( self, request: contact_center_insights.GetIssueRequest, @@ -895,6 +977,31 @@ def post_get_view(self, response: resources.View) -> resources.View: """ return response + def pre_import_issue_model( + self, + request: contact_center_insights.ImportIssueModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.ImportIssueModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for import_issue_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_import_issue_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_issue_model + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_ingest_conversations( self, request: contact_center_insights.IngestConversationsRequest, @@ -920,6 +1027,32 @@ def post_ingest_conversations( """ return response + def pre_initialize_encryption_spec( + self, + request: contact_center_insights.InitializeEncryptionSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.InitializeEncryptionSpecRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for initialize_encryption_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_initialize_encryption_spec( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for initialize_encryption_spec + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_list_analyses( self, request: contact_center_insights.ListAnalysesRequest, @@ -2960,6 +3093,101 @@ def __call__( resp = self._interceptor.post_export_insights_data(resp) return resp + class _ExportIssueModel(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("ExportIssueModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.ExportIssueModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export issue model method over HTTP. + + Args: + request (~.contact_center_insights.ExportIssueModelRequest): + The request object. Request to export an issue model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/issueModels/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_issue_model( + request, metadata + ) + pb_request = contact_center_insights.ExportIssueModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_issue_model(resp) + return resp + class _GetAnalysis(ContactCenterInsightsRestStub): def __hash__(self): return hash("GetAnalysis") @@ -3132,6 +3360,97 @@ def __call__( resp = self._interceptor.post_get_conversation(resp) return resp + class _GetEncryptionSpec(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("GetEncryptionSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.GetEncryptionSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Call the get encryption spec method over HTTP. + + Args: + request (~.contact_center_insights.GetEncryptionSpecRequest): + The request object. The request to get location-level + encryption specification. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/encryptionSpec}", + }, + ] + request, metadata = self._interceptor.pre_get_encryption_spec( + request, metadata + ) + pb_request = contact_center_insights.GetEncryptionSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.EncryptionSpec() + pb_resp = resources.EncryptionSpec.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_encryption_spec(resp) + return resp + class _GetIssue(ContactCenterInsightsRestStub): def __hash__(self): return hash("GetIssue") @@ -3426,7 +3745,13 @@ def __call__( Returns: ~.resources.Settings: - The settings resource. + The CCAI Insights project wide settings. Use these + settings to configure the behavior of Insights. View + these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. + """ http_options: List[Dict[str, str]] = [ @@ -3561,6 +3886,101 @@ def __call__( resp = self._interceptor.post_get_view(resp) return resp + class _ImportIssueModel(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("ImportIssueModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.ImportIssueModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import issue model method over HTTP. + + Args: + request (~.contact_center_insights.ImportIssueModelRequest): + The request object. Request to import an issue model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/issueModels:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_issue_model( + request, metadata + ) + pb_request = contact_center_insights.ImportIssueModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_issue_model(resp) + return resp + class _IngestConversations(ContactCenterInsightsRestStub): def __hash__(self): return hash("IngestConversations") @@ -3656,6 +4076,105 @@ def __call__( resp = self._interceptor.post_ingest_conversations(resp) return resp + class _InitializeEncryptionSpec(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("InitializeEncryptionSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.InitializeEncryptionSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the initialize encryption + spec method over HTTP. + + Args: + request (~.contact_center_insights.InitializeEncryptionSpecRequest): + The request object. The request to initialize a + location-level encryption specification. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{encryption_spec.name=projects/*/locations/*/encryptionSpec}:initialize", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_initialize_encryption_spec( + request, metadata + ) + pb_request = contact_center_insights.InitializeEncryptionSpecRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_initialize_encryption_spec(resp) + return resp + class _ListAnalyses(ContactCenterInsightsRestStub): def __hash__(self): return hash("ListAnalyses") @@ -4685,7 +5204,13 @@ def __call__( Returns: ~.resources.Settings: - The settings resource. + The CCAI Insights project wide settings. Use these + settings to configure the behavior of Insights. View + these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. + """ http_options: List[Dict[str, str]] = [ @@ -5093,6 +5618,16 @@ def export_insights_data( # In C++ this would require a dynamic_cast return self._ExportInsightsData(self._session, self._host, self._interceptor) # type: ignore + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportIssueModel(self._session, self._host, self._interceptor) # type: ignore + @property def get_analysis( self, @@ -5111,6 +5646,16 @@ def get_conversation( # In C++ this would require a dynamic_cast return self._GetConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], resources.EncryptionSpec + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetEncryptionSpec(self._session, self._host, self._interceptor) # type: ignore + @property def get_issue( self, @@ -5153,6 +5698,16 @@ def get_view( # In C++ this would require a dynamic_cast return self._GetView(self._session, self._host, self._interceptor) # type: ignore + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportIssueModel(self._session, self._host, self._interceptor) # type: ignore + @property def ingest_conversations( self, @@ -5163,6 +5718,17 @@ def ingest_conversations( # In C++ this would require a dynamic_cast return self._IngestConversations(self._session, self._host, self._interceptor) # type: ignore + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InitializeEncryptionSpec(self._session, self._host, self._interceptor) # type: ignore + @property def list_analyses( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py index be8c70ff77d9..30e7ff857b46 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py @@ -45,16 +45,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, ListConversationsRequest, @@ -90,11 +100,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -157,16 +169,26 @@ "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + 
"InitializeEncryptionSpecResponse", "ListAnalysesRequest", "ListAnalysesResponse", "ListConversationsRequest", @@ -201,11 +223,13 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py index 60816f4c061f..1229fb10453f 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py @@ -71,6 +71,12 @@ "UndeployIssueModelRequest", "UndeployIssueModelResponse", "UndeployIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", + "ExportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", + "ImportIssueModelMetadata", "GetIssueRequest", "ListIssuesRequest", "ListIssuesResponse", @@ -86,6 +92,10 @@ "UpdatePhraseMatcherRequest", "GetSettingsRequest", "UpdateSettingsRequest", + "GetEncryptionSpecRequest", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", + "InitializeEncryptionSpecMetadata", "CreateViewRequest", "GetViewRequest", "ListViewsRequest", @@ -394,7 +404,7 @@ class UploadConversationRequest(proto.Message): class UploadConversationMetadata(proto.Message): - r"""The metadata for an UploadConversation operation. + r"""The metadata for an ``UploadConversation`` operation. Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -449,9 +459,9 @@ class ListConversationsRequest(proto.Message): page_size (int): The maximum number of conversations to return in the response. A valid page size ranges from 0 - to 1,000 inclusive. If the page size is zero or - unspecified, a default page size of 100 will be - chosen. Note that a call might return fewer + to 100,000 inclusive. If the page size is zero + or unspecified, a default page size of 100 will + be chosen. Note that a call might return fewer results than the requested page size. page_token (str): The value returned by the last @@ -462,6 +472,23 @@ class ListConversationsRequest(proto.Message): A filter to reduce results to a specific subset. Useful for querying conversations with specific properties. + order_by (str): + Optional. The attribute by which to order conversations in + the response. If empty, conversations will be ordered by + descending creation time. Supported values are one of the + following: + + - create_time + - customer_satisfaction_rating + - duration + - latest_analysis + - start_time + - turn_count + + The default sort order is ascending. To specify order, + append ``asc`` or ``desc`` (``create_time desc``). For more + details, see `Google AIPs + Ordering `__. view (google.cloud.contact_center_insights_v1.types.ConversationView): The level of details of the conversation. Default is ``BASIC``. 
@@ -483,6 +510,10 @@ class ListConversationsRequest(proto.Message): proto.STRING, number=4, ) + order_by: str = proto.Field( + proto.STRING, + number=7, + ) view: "ConversationView" = proto.Field( proto.ENUM, number=5, @@ -549,7 +580,20 @@ class UpdateConversationRequest(proto.Message): Required. The new values for the conversation. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. + The list of fields to be updated. All possible fields can be + updated by passing ``*``, or a subset of the following + updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` """ conversation: resources.Conversation = proto.Field( @@ -619,11 +663,22 @@ class IngestConversationsRequest(proto.Message): Optional. Default Speech-to-Text configuration. Optional, will default to the config specified in Settings. + sample_size (int): + Optional. If set, this fields indicates the + number of objects to ingest from the Cloud + Storage bucket. If empty, the entire bucket will + be ingested. Unless they are first deleted, + conversations produced through sampling won't be + ingested by subsequent ingest requests. + + This field is a member of `oneof`_ ``_sample_size``. """ class GcsSource(proto.Message): r"""Configuration for Cloud Storage bucket sources. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: bucket_uri (str): Required. The Cloud Storage bucket containing @@ -631,6 +686,22 @@ class GcsSource(proto.Message): bucket_object_type (google.cloud.contact_center_insights_v1.types.IngestConversationsRequest.GcsSource.BucketObjectType): Optional. Specifies the type of the objects in ``bucket_uri``. + metadata_bucket_uri (str): + Optional. The Cloud Storage path to the conversation + metadata. Note that: [1] Metadata files are expected to be + in JSON format. [2] Metadata and source files (transcripts + or audio) must be in separate buckets. [3] A source file and + its corresponding metadata file must share the same name to + be properly ingested, E.g. + ``gs://bucket/audio/conversation1.mp3`` and + ``gs://bucket/metadata/conversation1.json``. + + This field is a member of `oneof`_ ``_metadata_bucket_uri``. + custom_metadata_keys (MutableSequence[str]): + Optional. Custom keys to extract as conversation labels from + metadata files in ``metadata_bucket_uri``. Keys not included + in this field will be ignored. Note that there is a limit of + 20 labels per conversation. """ class BucketObjectType(proto.Enum): @@ -660,6 +731,15 @@ class BucketObjectType(proto.Enum): enum="IngestConversationsRequest.GcsSource.BucketObjectType", ) ) + metadata_bucket_uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + custom_metadata_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) class TranscriptObjectConfig(proto.Message): r"""Configuration for processing transcript objects. @@ -681,8 +761,10 @@ class ConversationConfig(proto.Message): Attributes: agent_id (str): - An opaque, user-specified string representing - the human agent who handled the conversations. + Optional. An opaque, user-specified string representing a + human agent who handled all conversations in the import. 
+ Note that this will be overridden if per-conversation + metadata is provided through the ``metadata_bucket_uri``. agent_channel (int): Optional. Indicates which of the channels, 1 or 2, contains the agent. Note that this must be @@ -739,6 +821,11 @@ class ConversationConfig(proto.Message): number=6, message=resources.SpeechConfig, ) + sample_size: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) class IngestConversationsMetadata(proto.Message): @@ -1559,6 +1646,163 @@ class UndeployIssueModelMetadata(proto.Message): ) +class ExportIssueModelRequest(proto.Message): + r"""Request to export an issue model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_destination (google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest.GcsDestination): + Google Cloud Storage URI to export the issue + model to. + + This field is a member of `oneof`_ ``Destination``. + name (str): + Required. The issue model to export. + """ + + class GcsDestination(proto.Message): + r"""Google Cloud Storage Object URI to save the issue model to. + + Attributes: + object_uri (str): + Required. Format: ``gs:///`` + """ + + object_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + gcs_destination: GcsDestination = proto.Field( + proto.MESSAGE, + number=2, + oneof="Destination", + message=GcsDestination, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportIssueModelResponse(proto.Message): + r"""Response from export issue model""" + + +class ExportIssueModelMetadata(proto.Message): + r"""Metadata used for export issue model. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. + request (google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest): + The original export request. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "ExportIssueModelRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="ExportIssueModelRequest", + ) + + +class ImportIssueModelRequest(proto.Message): + r"""Request to import an issue model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_source (google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest.GcsSource): + Google Cloud Storage source message. + + This field is a member of `oneof`_ ``Source``. + parent (str): + Required. The parent resource of the issue + model. + create_new_model (bool): + Optional. If set to true, will create an + issue model from the imported file with randomly + generated IDs for the issue model and + corresponding issues. Otherwise, replaces an + existing model with the same ID as the file. + """ + + class GcsSource(proto.Message): + r"""Google Cloud Storage Object URI to get the issue model file + from. + + Attributes: + object_uri (str): + Required. 
Format: ``gs:///`` + """ + + object_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + gcs_source: GcsSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="Source", + message=GcsSource, + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + create_new_model: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ImportIssueModelResponse(proto.Message): + r"""Response from import issue model""" + + +class ImportIssueModelMetadata(proto.Message): + r"""Metadata used for import issue model. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. + request (google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest): + The original import request. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "ImportIssueModelRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="ImportIssueModelRequest", + ) + + class GetIssueRequest(proto.Message): r"""The request to get an issue. @@ -1855,6 +2099,90 @@ class UpdateSettingsRequest(proto.Message): ) +class GetEncryptionSpecRequest(proto.Message): + r"""The request to get location-level encryption specification. + + Attributes: + name (str): + Required. The name of the encryption spec + resource to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InitializeEncryptionSpecRequest(proto.Message): + r"""The request to initialize a location-level encryption + specification. + + Attributes: + encryption_spec (google.cloud.contact_center_insights_v1.types.EncryptionSpec): + Required. The encryption spec used for CMEK encryption. It + is required that the kms key is in the same region as the + endpoint. The same key will be used for all provisioned + resources, if encryption is available. If the kms_key_name + is left empty, no encryption will be enforced. + """ + + encryption_spec: resources.EncryptionSpec = proto.Field( + proto.MESSAGE, + number=1, + message=resources.EncryptionSpec, + ) + + +class InitializeEncryptionSpecResponse(proto.Message): + r"""The response to initialize a location-level encryption + specification. + + """ + + +class InitializeEncryptionSpecMetadata(proto.Message): + r"""Metadata for initializing a location-level encryption + specification. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + request (google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest): + Output only. The original request for + initialization. + partial_errors (MutableSequence[google.rpc.status_pb2.Status]): + Partial errors during initialising operation + that might cause the operation output to be + incomplete. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "InitializeEncryptionSpecRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="InitializeEncryptionSpecRequest", + ) + partial_errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + class CreateViewRequest(proto.Message): r"""The request to create a view. diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py index 0302667ebc6e..83de2aec7b7e 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py @@ -32,6 +32,7 @@ "AnalysisResult", "IssueModelResult", "ConversationLevelSentiment", + "ConversationLevelSilence", "IssueAssignment", "CallAnnotation", "AnnotationBoundary", @@ -55,6 +56,7 @@ "PhraseMatchRuleConfig", "ExactMatchConfig", "Settings", + "EncryptionSpec", "RedactionConfig", "SpeechConfig", "RuntimeAnnotation", @@ -129,6 +131,11 @@ class Conversation(proto.Message): quality_metadata (google.cloud.contact_center_insights_v1.types.Conversation.QualityMetadata): Conversation metadata related to quality management. + metadata_json (str): + Input only. JSON Metadata encoded as a + string. This field is primarily used by Insights + integrations with various telphony systems and + must be in one of Insights' supported formats. transcript (google.cloud.contact_center_insights_v1.types.Conversation.Transcript): Output only. The conversation transcript. medium (google.cloud.contact_center_insights_v1.types.Conversation.Medium): @@ -482,6 +489,10 @@ class DialogflowSegmentMetadata(proto.Message): number=24, message=QualityMetadata, ) + metadata_json: str = proto.Field( + proto.STRING, + number=25, + ) transcript: Transcript = proto.Field( proto.MESSAGE, number=8, @@ -691,6 +702,9 @@ class CallAnalysisMetadata(proto.Message): sentiments (MutableSequence[google.cloud.contact_center_insights_v1.types.ConversationLevelSentiment]): Overall conversation-level sentiment for each channel of the call. + silence (google.cloud.contact_center_insights_v1.types.ConversationLevelSilence): + Overall conversation-level silence during the + call. intents (MutableMapping[str, google.cloud.contact_center_insights_v1.types.Intent]): All the matched intents in the call. phrase_matchers (MutableMapping[str, google.cloud.contact_center_insights_v1.types.PhraseMatchData]): @@ -716,6 +730,11 @@ class CallAnalysisMetadata(proto.Message): number=4, message="ConversationLevelSentiment", ) + silence: "ConversationLevelSilence" = proto.Field( + proto.MESSAGE, + number=11, + message="ConversationLevelSilence", + ) intents: MutableMapping[str, "Intent"] = proto.MapField( proto.STRING, proto.MESSAGE, @@ -791,6 +810,28 @@ class ConversationLevelSentiment(proto.Message): ) +class ConversationLevelSilence(proto.Message): + r"""Conversation-level silence data. + + Attributes: + silence_duration (google.protobuf.duration_pb2.Duration): + Amount of time calculated to be in silence. 
+ silence_percentage (float): + Percentage of the total conversation spent in + silence. + """ + + silence_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + silence_percentage: float = proto.Field( + proto.FLOAT, + number=2, + ) + + class IssueAssignment(proto.Message): r"""Information about the issue. @@ -1456,6 +1497,8 @@ class Issue(proto.Message): Output only. Resource names of the sample representative utterances that match to this issue. + display_description (str): + Representative description of the issue. """ name: str = proto.Field( @@ -1480,6 +1523,10 @@ class Issue(proto.Message): proto.STRING, number=6, ) + display_description: str = proto.Field( + proto.STRING, + number=14, + ) class IssueModelLabelStats(proto.Message): @@ -1764,7 +1811,11 @@ class ExactMatchConfig(proto.Message): class Settings(proto.Message): - r"""The settings resource. + r"""The CCAI Insights project wide settings. Use these settings to + configure the behavior of Insights. View these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. Attributes: name (str): @@ -1807,21 +1858,30 @@ class Settings(proto.Message): created. - "export-insights-data": Notify each time an export is complete. + - "ingest-conversations": Notify each time an + IngestConversations LRO is complete. - "update-conversation": Notify each time a conversation is updated via UpdateConversation. + - "upload-conversation": Notify when an UploadConversation + LRO is complete. Values are Pub/Sub topics. The format of each Pub/Sub topic is: projects/{project}/topics/{topic} analysis_config (google.cloud.contact_center_insights_v1.types.Settings.AnalysisConfig): Default analysis settings. redaction_config (google.cloud.contact_center_insights_v1.types.RedactionConfig): - Default DLP redaction resources to be applied - while ingesting conversations. + Default DLP redaction resources to be applied while + ingesting conversations. This applies to conversations + ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversations + coming from CCAI Platform. speech_config (google.cloud.contact_center_insights_v1.types.SpeechConfig): - Optional. Default Speech-to-Text resources to - be used while ingesting audio files. Optional, - CCAI Insights will create a default if not - provided. + Optional. Default Speech-to-Text resources to use while + ingesting audio files. Optional, CCAI Insights will create a + default if not provided. This applies to conversations + ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversations + coming from CCAI Platform. """ class AnalysisConfig(proto.Message): @@ -1900,9 +1960,44 @@ class AnalysisConfig(proto.Message): ) +class EncryptionSpec(proto.Message): + r"""A customer-managed encryption key specification that can be + applied to all created resources (e.g. Conversation). + + Attributes: + name (str): + Immutable. The resource name of the + encryption key specification resource. Format: + + projects/{project}/locations/{location}/encryptionSpec + kms_key (str): + Required. The name of customer-managed encryption key that + is used to secure a resource and its sub-resources. If + empty, the resource is secured by the default Google + encryption key. Only the key in the same location as this + resource is allowed to be used for encryption. 
Format: + ``projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{key}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key: str = proto.Field( + proto.STRING, + number=2, + ) + + class RedactionConfig(proto.Message): - r"""DLP resources used for redaction while ingesting - conversations. + r"""DLP resources used for redaction while ingesting conversations. DLP + settings are applied to conversations ingested from the + ``UploadConversation`` and ``IngestConversations`` endpoints, + including conversation coming from CCAI Platform. They are not + applied to conversations ingested from the ``CreateConversation`` + endpoint or the Dialogflow / Agent Assist runtime integrations. When + using Dialogflow / Agent Assist runtime integrations, redaction + should be performed in Dialogflow / Agent Assist. Attributes: deidentify_template (str): @@ -1926,7 +2021,11 @@ class RedactionConfig(proto.Message): class SpeechConfig(proto.Message): - r"""Speech-to-Text configuration. + r"""Speech-to-Text configuration. Speech-to-Text settings are applied to + conversations ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversation coming + from CCAI Platform. They are not applied to conversations ingested + from the ``CreateConversation`` endpoint. Attributes: speech_recognizer (str): @@ -1991,8 +2090,56 @@ class RuntimeAnnotation(proto.Message): answer_feedback (google.cloud.contact_center_insights_v1.types.AnswerFeedback): The feedback that the customer has about the answer in ``data``. + user_input (google.cloud.contact_center_insights_v1.types.RuntimeAnnotation.UserInput): + Explicit input used for generating the answer """ + class UserInput(proto.Message): + r"""Explicit input used for generating the answer + + Attributes: + query (str): + Query text. Article Search uses this to store + the input query used to generate the search + results. + generator_name (str): + The resource name of associated generator. Format: + ``projects//locations//generators/`` + query_source (google.cloud.contact_center_insights_v1.types.RuntimeAnnotation.UserInput.QuerySource): + Query source for the answer. + """ + + class QuerySource(proto.Enum): + r"""The source of the query. + + Values: + QUERY_SOURCE_UNSPECIFIED (0): + Unknown query source. + AGENT_QUERY (1): + The query is from agents. + SUGGESTED_QUERY (2): + The query is a query from previous + suggestions, e.g. from a preceding + SuggestKnowledgeAssist response. + """ + QUERY_SOURCE_UNSPECIFIED = 0 + AGENT_QUERY = 1 + SUGGESTED_QUERY = 2 + + query: str = proto.Field( + proto.STRING, + number=1, + ) + generator_name: str = proto.Field( + proto.STRING, + number=2, + ) + query_source: "RuntimeAnnotation.UserInput.QuerySource" = proto.Field( + proto.ENUM, + number=3, + enum="RuntimeAnnotation.UserInput.QuerySource", + ) + article_suggestion: "ArticleSuggestionData" = proto.Field( proto.MESSAGE, number=6, @@ -2055,6 +2202,11 @@ class RuntimeAnnotation(proto.Message): number=5, message="AnswerFeedback", ) + user_input: UserInput = proto.Field( + proto.MESSAGE, + number=16, + message=UserInput, + ) class AnswerFeedback(proto.Message): @@ -2566,9 +2718,12 @@ class SummarizationModel(proto.Enum): Unspecified summarization model. BASELINE_MODEL (1): The CCAI baseline model. + BASELINE_MODEL_V2_0 (2): + The CCAI baseline model, V2.0. 
""" SUMMARIZATION_MODEL_UNSPECIFIED = 0 BASELINE_MODEL = 1 + BASELINE_MODEL_V2_0 = 2 conversation_profile: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py new file mode 100644 index 000000000000..ec4c508ac2b3 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py new file mode 100644 index 000000000000..b9fa9152f794 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py new file mode 100644 index 000000000000..9217d1773b5f --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_spec(request=request) + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py new file mode 100644 index 000000000000..90f3743e6570 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_spec(request=request) + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py new file mode 100644 index 000000000000..e90dd84dc650 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py new file mode 100644 index 000000000000..74ee99de39bc --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py new file mode 100644 index 000000000000..bc5767e0a1c1 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py new file mode 100644 index 000000000000..26a058ab9ae1 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index bb09ad8db4c2..ab3be662543b 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -2798,19 +2798,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_analysis", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.export_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ExportIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetAnalysis" + "shortName": "ExportIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" + "type": "google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest" }, { "name": "name", @@ -2829,22 +2829,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", - "shortName": "get_analysis" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_issue_model" }, - "description": "Sample for GetAnalysis", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py", + "description": "Sample for ExportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -2854,22 +2854,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py" }, { "canonical": true, @@ -2878,19 +2878,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_analysis", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.export_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ExportIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetAnalysis" + "shortName": "ExportIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" + "type": "google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest" }, { "name": "name", @@ -2909,22 +2909,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", - "shortName": "get_analysis" + "resultType": "google.api_core.operation.Operation", + "shortName": "export_issue_model" }, - "description": "Sample for GetAnalysis", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py", + "description": "Sample for ExportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -2934,22 +2934,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py" }, { "canonical": true, @@ -2959,19 +2959,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_conversation", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_analysis", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": 
"GetConversation" + "shortName": "GetAnalysis" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" }, { "name": "name", @@ -2990,14 +2990,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", + "shortName": "get_analysis" }, - "description": "Sample for GetConversation", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py", + "description": "Sample for GetAnalysis", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_async", "segments": [ { "end": 51, @@ -3030,7 +3030,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py" }, { "canonical": true, @@ -3039,19 +3039,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_conversation", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_analysis", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetConversation" + "shortName": "GetAnalysis" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" }, { "name": "name", @@ -3070,14 +3070,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", + "shortName": "get_analysis" }, - "description": "Sample for GetConversation", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py", + "description": "Sample for GetAnalysis", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_sync", "segments": [ { "end": 51, @@ -3110,7 +3110,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py" }, { "canonical": true, @@ -3120,19 +3120,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": 
"ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue_model", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_conversation", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssueModel" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" }, { "name": "name", @@ -3151,14 +3151,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", - "shortName": "get_issue_model" + "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", + "shortName": "get_conversation" }, - "description": "Sample for GetIssueModel", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py", + "description": "Sample for GetConversation", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_async", "segments": [ { "end": 51, @@ -3191,7 +3191,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py" }, { "canonical": true, @@ -3200,19 +3200,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue_model", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_conversation", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssueModel" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" }, { "name": "name", @@ -3231,14 +3231,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", - "shortName": "get_issue_model" + "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", + "shortName": "get_conversation" }, - "description": "Sample for GetIssueModel", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py", + "description": "Sample for GetConversation", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_sync", "segments": [ { "end": 51, @@ -3271,7 +3271,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py" }, { "canonical": true, @@ -3281,19 +3281,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_encryption_spec", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetEncryptionSpec", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssue" + "shortName": "GetEncryptionSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest" }, { "name": "name", @@ -3312,14 +3312,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Issue", - "shortName": "get_issue" + "resultType": "google.cloud.contact_center_insights_v1.types.EncryptionSpec", + "shortName": "get_encryption_spec" }, - "description": "Sample for GetIssue", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py", + "description": "Sample for GetEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async", "segments": [ { "end": 51, @@ -3352,7 +3352,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py" }, { "canonical": true, @@ -3361,19 +3361,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_encryption_spec", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetEncryptionSpec", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssue" + "shortName": "GetEncryptionSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest" }, { "name": "name", @@ -3392,14 +3392,14 
@@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Issue", - "shortName": "get_issue" + "resultType": "google.cloud.contact_center_insights_v1.types.EncryptionSpec", + "shortName": "get_encryption_spec" }, - "description": "Sample for GetIssue", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py", + "description": "Sample for GetEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync", "segments": [ { "end": 51, @@ -3432,7 +3432,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py" }, { "canonical": true, @@ -3442,19 +3442,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_phrase_matcher", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetPhraseMatcher" + "shortName": "GetIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" }, { "name": "name", @@ -3473,14 +3473,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", - "shortName": "get_phrase_matcher" + "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", + "shortName": "get_issue_model" }, - "description": "Sample for GetPhraseMatcher", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py", + "description": "Sample for GetIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_async", "segments": [ { "end": 51, @@ -3513,7 +3513,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py" }, { "canonical": true, @@ -3522,19 +3522,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_phrase_matcher", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue_model", 
"method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetPhraseMatcher" + "shortName": "GetIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" }, { "name": "name", @@ -3553,14 +3553,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", - "shortName": "get_phrase_matcher" + "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", + "shortName": "get_issue_model" }, - "description": "Sample for GetPhraseMatcher", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py", + "description": "Sample for GetIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_sync", "segments": [ { "end": 51, @@ -3593,7 +3593,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py" }, { "canonical": true, @@ -3603,19 +3603,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_settings", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetSettings" + "shortName": "GetIssue" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" }, { "name": "name", @@ -3634,14 +3634,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.contact_center_insights_v1.types.Issue", + "shortName": "get_issue" }, - "description": "Sample for GetSettings", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py", + "description": "Sample for GetIssue", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_async", "segments": [ { "end": 51, @@ -3674,7 +3674,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py" }, { "canonical": true, @@ -3683,19 +3683,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_settings", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetSettings" + "shortName": "GetIssue" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" }, { "name": "name", @@ -3714,14 +3714,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.contact_center_insights_v1.types.Issue", + "shortName": "get_issue" }, - "description": "Sample for GetSettings", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py", + "description": "Sample for GetIssue", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_sync", "segments": [ { "end": 51, @@ -3754,7 +3754,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py" }, { "canonical": true, @@ -3764,19 +3764,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_view", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_phrase_matcher", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetView" + "shortName": "GetPhraseMatcher" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" }, { "name": "name", @@ -3795,14 +3795,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.View", - "shortName": "get_view" + "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", + "shortName": "get_phrase_matcher" }, - "description": "Sample for GetView", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py", + 
"description": "Sample for GetPhraseMatcher", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_async", "segments": [ { "end": 51, @@ -3835,7 +3835,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py" }, { "canonical": true, @@ -3844,14 +3844,256 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_view", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_phrase_matcher", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetView" + "shortName": "GetPhraseMatcher" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", + "shortName": "get_phrase_matcher" + }, + "description": "Sample for GetPhraseMatcher", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_settings", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_settings", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_view", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetView" }, "parameters": [ { @@ -3859,7 +4101,168 @@ "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" }, { - "name": "name", + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.View", + "shortName": "get_view" + }, + "description": "Sample for GetView", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_view", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.View", + "shortName": "get_view" + }, + "description": "Sample for GetView", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.import_issue_model", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ImportIssueModel", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "ImportIssueModel" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest" + }, + { + "name": "parent", "type": "str" }, { @@ -3875,22 +4278,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.View", - "shortName": "get_view" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_issue_model" }, - "description": "Sample for GetView", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py", + "description": "Sample for ImportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -3900,22 +4303,102 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.import_issue_model", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ImportIssueModel", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "ImportIssueModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_issue_model" + }, + "description": "Sample for ImportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py" }, { "canonical": true, @@ -4078,6 +4561,167 @@ ], "title": "contactcenterinsights_v1_generated_contact_center_insights_ingest_conversations_sync.py" }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.initialize_encryption_spec", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.InitializeEncryptionSpec", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "InitializeEncryptionSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest" + }, + { + "name": "encryption_spec", + "type": "google.cloud.contact_center_insights_v1.types.EncryptionSpec" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "initialize_encryption_spec" + }, + "description": "Sample for InitializeEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.initialize_encryption_spec", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.InitializeEncryptionSpec", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "InitializeEncryptionSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest" + }, + { + "name": "encryption_spec", + "type": "google.cloud.contact_center_insights_v1.types.EncryptionSpec" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "initialize_encryption_spec" + }, + "description": "Sample for InitializeEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": 
"FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py index ca15d27e1fd8..10d05e634fa6 100644 --- a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py +++ b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py @@ -56,16 +56,20 @@ class contact_center_insightsCallTransformer(cst.CSTTransformer): 'delete_view': ('name', ), 'deploy_issue_model': ('name', ), 'export_insights_data': ('parent', 'big_query_destination', 'filter', 'kms_key', 'write_disposition', ), + 'export_issue_model': ('name', 'gcs_destination', ), 'get_analysis': ('name', ), 'get_conversation': ('name', 'view', ), + 'get_encryption_spec': ('name', ), 'get_issue': ('name', ), 'get_issue_model': ('name', ), 'get_phrase_matcher': ('name', ), 'get_settings': ('name', ), 'get_view': ('name', ), - 'ingest_conversations': ('parent', 'gcs_source', 'transcript_object_config', 'conversation_config', 'redaction_config', 'speech_config', ), + 'import_issue_model': ('parent', 'gcs_source', 'create_new_model', ), + 'ingest_conversations': ('parent', 'gcs_source', 'transcript_object_config', 'conversation_config', 'redaction_config', 'speech_config', 'sample_size', ), + 'initialize_encryption_spec': ('encryption_spec', ), 'list_analyses': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'view', ), + 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), 'list_issue_models': ('parent', ), 'list_issues': ('parent', ), 'list_phrase_matchers': ('parent', 'page_size', 'page_token', 'filter', ), diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index 962ffb010308..034282057f4a 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -1230,6 +1230,7 @@ def test_create_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1247,6 +1248,7 @@ def test_create_conversation(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == 
resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -1363,6 +1365,7 @@ async def test_create_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1440,6 +1443,7 @@ async def test_create_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1458,6 +1462,7 @@ async def test_create_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -1983,6 +1988,7 @@ def test_update_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2000,6 +2006,7 @@ def test_update_conversation(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2110,6 +2117,7 @@ async def test_update_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2187,6 +2195,7 @@ async def test_update_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2205,6 +2214,7 @@ async def test_update_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2412,6 +2422,7 @@ def test_get_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2429,6 +2440,7 @@ def test_get_conversation(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert 
response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2535,6 +2547,7 @@ async def test_get_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2610,6 +2623,7 @@ async def test_get_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2628,6 +2642,7 @@ async def test_get_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2855,6 +2870,7 @@ def test_list_conversations_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2871,6 +2887,7 @@ def test_list_conversations_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) @@ -9762,11 +9779,11 @@ async def test_undeploy_issue_model_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueRequest, + contact_center_insights.ExportIssueModelRequest, dict, ], ) -def test_get_issue(request_type, transport: str = "grpc"): +def test_export_issue_model(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9777,29 +9794,24 @@ def test_get_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) - response = client.get_issue(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance(response, future.Future) -def test_get_issue_empty_call(): +def test_export_issue_model_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -9808,17 +9820,19 @@ def test_get_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_issue() + client.export_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest() + assert args[0] == contact_center_insights.ExportIssueModelRequest() -def test_get_issue_non_empty_request_with_auto_populated_field(): +def test_export_issue_model_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -9829,24 +9843,26 @@ def test_get_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetIssueRequest( + request = contact_center_insights.ExportIssueModelRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_issue(request=request) + client.export_issue_model(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest( + assert args[0] == contact_center_insights.ExportIssueModelRequest( name="name_value", ) -def test_get_issue_use_cached_wrapped_rpc(): +def test_export_issue_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9860,21 +9876,30 @@ def test_get_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue in client._transport._wrapped_methods + assert ( + client._transport.export_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_issue_model + ] = mock_rpc request = {} - client.get_issue(request) + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_issue(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9882,7 +9907,7 @@ def test_get_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_issue_empty_call_async(): +async def test_export_issue_model_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -9891,23 +9916,23 @@ async def test_get_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_issue() + response = await client.export_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest() + assert args[0] == contact_center_insights.ExportIssueModelRequest() @pytest.mark.asyncio -async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_export_issue_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9922,7 +9947,7 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.get_issue + client._client._transport.export_issue_model in client._client._transport._wrapped_methods ) @@ -9930,16 +9955,21 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_issue + client._client._transport.export_issue_model ] = mock_rpc request = {} - await client.get_issue(request) + await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_issue(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9947,9 +9977,9 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_get_issue_async( +async def test_export_issue_model_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetIssueRequest, + request_type=contact_center_insights.ExportIssueModelRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9961,50 +9991,47 @@ async def test_get_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_issue(request) + response = await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_issue_async_from_dict(): - await test_get_issue_async(request_type=dict) +async def test_export_issue_model_async_from_dict(): + await test_export_issue_model_async(request_type=dict) -def test_get_issue_field_headers(): +def test_export_issue_model_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: - call.return_value = resources.Issue() - client.get_issue(request) + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10020,21 +10047,25 @@ def test_get_issue_field_headers(): @pytest.mark.asyncio -async def test_get_issue_field_headers_async(): +async def test_export_issue_model_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) - await client.get_issue(request) + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10049,18 +10080,20 @@ async def test_get_issue_field_headers_async(): ) in kw["metadata"] -def test_get_issue_flattened(): +def test_export_issue_model_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Issue() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_issue( + client.export_issue_model( name="name_value", ) @@ -10073,7 +10106,7 @@ def test_get_issue_flattened(): assert arg == mock_val -def test_get_issue_flattened_error(): +def test_export_issue_model_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10081,27 +10114,31 @@ def test_get_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_issue( - contact_center_insights.GetIssueRequest(), + client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_issue_flattened_async(): +async def test_export_issue_model_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Issue() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_issue( + response = await client.export_issue_model( name="name_value", ) @@ -10115,7 +10152,7 @@ async def test_get_issue_flattened_async(): @pytest.mark.asyncio -async def test_get_issue_flattened_error_async(): +async def test_export_issue_model_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10123,8 +10160,8 @@ async def test_get_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_issue( - contact_center_insights.GetIssueRequest(), + await client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), name="name_value", ) @@ -10132,11 +10169,11 @@ async def test_get_issue_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssuesRequest, + contact_center_insights.ImportIssueModelRequest, dict, ], ) -def test_list_issues(request_type, transport: str = "grpc"): +def test_import_issue_model(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10147,22 +10184,24 @@ def test_list_issues(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() - response = client.list_issues(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert isinstance(response, future.Future) -def test_list_issues_empty_call(): +def test_import_issue_model_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10171,17 +10210,19 @@ def test_list_issues_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_issues() + client.import_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest() + assert args[0] == contact_center_insights.ImportIssueModelRequest() -def test_list_issues_non_empty_request_with_auto_populated_field(): +def test_import_issue_model_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -10192,24 +10233,26 @@ def test_list_issues_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListIssuesRequest( + request = contact_center_insights.ImportIssueModelRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_issues(request=request) + client.import_issue_model(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest( + assert args[0] == contact_center_insights.ImportIssueModelRequest( parent="parent_value", ) -def test_list_issues_use_cached_wrapped_rpc(): +def test_import_issue_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10223,21 +10266,30 @@ def test_list_issues_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issues in client._transport._wrapped_methods + assert ( + client._transport.import_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_issue_model + ] = mock_rpc request = {} - client.list_issues(request) + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_issues(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10245,7 +10297,7 @@ def test_list_issues_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_issues_empty_call_async(): +async def test_import_issue_model_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10254,19 +10306,21 @@ async def test_list_issues_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_issues() + response = await client.import_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest() + assert args[0] == contact_center_insights.ImportIssueModelRequest() @pytest.mark.asyncio -async def test_list_issues_async_use_cached_wrapped_rpc( +async def test_import_issue_model_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10283,7 +10337,7 @@ async def test_list_issues_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_issues + client._client._transport.import_issue_model in client._client._transport._wrapped_methods ) @@ -10291,16 +10345,21 @@ async def test_list_issues_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_issues + client._client._transport.import_issue_model ] = mock_rpc request = {} - await client.list_issues(request) + await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_issues(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10308,9 +10367,9 @@ async def test_list_issues_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_issues_async( +async def test_import_issue_model_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListIssuesRequest, + request_type=contact_center_insights.ImportIssueModelRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10322,43 +10381,47 @@ async def test_list_issues_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_issues(request) + response = await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_issues_async_from_dict(): - await test_list_issues_async(request_type=dict) +async def test_import_issue_model_async_from_dict(): + await test_import_issue_model_async(request_type=dict) -def test_list_issues_field_headers(): +def test_import_issue_model_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: - call.return_value = contact_center_insights.ListIssuesResponse() - client.list_issues(request) + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10374,23 +10437,25 @@ def test_list_issues_field_headers(): @pytest.mark.asyncio -async def test_list_issues_field_headers_async(): +async def test_import_issue_model_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_issues(request) + await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10405,18 +10470,20 @@ async def test_list_issues_field_headers_async(): ) in kw["metadata"] -def test_list_issues_flattened(): +def test_import_issue_model_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_issues( + client.import_issue_model( parent="parent_value", ) @@ -10429,7 +10496,7 @@ def test_list_issues_flattened(): assert arg == mock_val -def test_list_issues_flattened_error(): +def test_import_issue_model_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10437,29 +10504,31 @@ def test_list_issues_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_issues( - contact_center_insights.ListIssuesRequest(), + client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_issues_flattened_async(): +async def test_import_issue_model_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_issues( + response = await client.import_issue_model( parent="parent_value", ) @@ -10473,7 +10542,7 @@ async def test_list_issues_flattened_async(): @pytest.mark.asyncio -async def test_list_issues_flattened_error_async(): +async def test_import_issue_model_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10481,8 +10550,8 @@ async def test_list_issues_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_issues( - contact_center_insights.ListIssuesRequest(), + await client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), parent="parent_value", ) @@ -10490,11 +10559,11 @@ async def test_list_issues_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateIssueRequest, + contact_center_insights.GetIssueRequest, dict, ], ) -def test_update_issue(request_type, transport: str = "grpc"): +def test_get_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10505,19 +10574,20 @@ def test_update_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) - response = client.update_issue(request) + response = client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -10525,9 +10595,10 @@ def test_update_issue(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_update_issue_empty_call(): +def test_get_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10536,17 +10607,17 @@ def test_update_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_issue() + client.get_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest() -def test_update_issue_non_empty_request_with_auto_populated_field(): +def test_get_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -10557,20 +10628,24 @@ def test_update_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_issue(request=request) + client.get_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest( + name="name_value", + ) -def test_update_issue_use_cached_wrapped_rpc(): +def test_get_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10584,21 +10659,21 @@ def test_update_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_issue in client._transport._wrapped_methods + assert client._transport.get_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc request = {} - client.update_issue(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_issue(request) + client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10606,7 +10681,7 @@ def test_update_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_issue_empty_call_async(): +async def test_get_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10615,25 +10690,24 @@ async def test_update_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) ) - response = await client.update_issue() + response = await client.get_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest() @pytest.mark.asyncio -async def test_update_issue_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10648,7 +10722,7 @@ async def test_update_issue_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_issue + client._client._transport.get_issue in client._client._transport._wrapped_methods ) @@ -10656,16 +10730,16 @@ async def test_update_issue_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_issue + client._client._transport.get_issue ] = mock_rpc request = {} - await client.update_issue(request) + await client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_issue(request) + await client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10673,9 +10747,9 @@ async def test_update_issue_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_issue_async( +async def test_get_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateIssueRequest, + request_type=contact_center_insights.GetIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10687,21 +10761,22 @@ async def test_update_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) ) - response = await client.update_issue(request) + response = await client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -10709,28 +10784,29 @@ async def test_update_issue_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" @pytest.mark.asyncio -async def test_update_issue_async_from_dict(): - await test_update_issue_async(request_type=dict) +async def test_get_issue_async_from_dict(): + await test_get_issue_async(request_type=dict) -def test_update_issue_field_headers(): +def test_get_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() - request.issue.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value = resources.Issue() - client.update_issue(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10741,26 +10817,26 @@ def test_update_issue_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_issue_field_headers_async(): +async def test_get_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() - request.issue.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) - await client.update_issue(request) + await client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10771,39 +10847,35 @@ async def test_update_issue_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_issue_flattened(): +def test_get_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.update_issue( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].issue - mock_val = resources.Issue(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_issue_flattened_error(): +def test_get_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10811,46 +10883,41 @@ def test_update_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_issue( + contact_center_insights.GetIssueRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_issue_flattened_async(): +async def test_get_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_issue( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].issue - mock_val = resources.Issue(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_issue_flattened_error_async(): +async def test_get_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10858,21 +10925,20 @@ async def test_update_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_issue( + contact_center_insights.GetIssueRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueRequest, + contact_center_insights.ListIssuesRequest, dict, ], ) -def test_delete_issue(request_type, transport: str = "grpc"): +def test_list_issues(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10883,22 +10949,22 @@ def test_delete_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_issue(request) + call.return_value = contact_center_insights.ListIssuesResponse() + response = client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, contact_center_insights.ListIssuesResponse) -def test_delete_issue_empty_call(): +def test_list_issues_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10907,17 +10973,17 @@ def test_delete_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_issue() + client.list_issues() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest() + assert args[0] == contact_center_insights.ListIssuesRequest() -def test_delete_issue_non_empty_request_with_auto_populated_field(): +def test_list_issues_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -10928,24 +10994,24 @@ def test_delete_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeleteIssueRequest( - name="name_value", + request = contact_center_insights.ListIssuesRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_issue(request=request) + client.list_issues(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest( - name="name_value", + assert args[0] == contact_center_insights.ListIssuesRequest( + parent="parent_value", ) -def test_delete_issue_use_cached_wrapped_rpc(): +def test_list_issues_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10959,21 +11025,21 @@ def test_delete_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_issue in client._transport._wrapped_methods + assert client._transport.list_issues in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc request = {} - client.delete_issue(request) + client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_issue(request) + client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10981,7 +11047,7 @@ def test_delete_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_issue_empty_call_async(): +async def test_list_issues_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10990,17 +11056,19 @@ async def test_delete_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_issue() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + response = await client.list_issues() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest() + assert args[0] == contact_center_insights.ListIssuesRequest() @pytest.mark.asyncio -async def test_delete_issue_async_use_cached_wrapped_rpc( +async def test_list_issues_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11017,7 +11085,7 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_issue + client._client._transport.list_issues in client._client._transport._wrapped_methods ) @@ -11025,16 +11093,16 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_issue + client._client._transport.list_issues ] = mock_rpc request = {} - await client.delete_issue(request) + await client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.delete_issue(request) + await client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11042,9 +11110,9 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_issue_async( +async def test_list_issues_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeleteIssueRequest, + request_type=contact_center_insights.ListIssuesRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11056,41 +11124,43 @@ async def test_delete_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_issue(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + response = await client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, contact_center_insights.ListIssuesResponse) @pytest.mark.asyncio -async def test_delete_issue_async_from_dict(): - await test_delete_issue_async(request_type=dict) +async def test_list_issues_async_from_dict(): + await test_list_issues_async(request_type=dict) -def test_delete_issue_field_headers(): +def test_list_issues_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: - call.return_value = None - client.delete_issue(request) + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + call.return_value = contact_center_insights.ListIssuesResponse() + client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11101,26 +11171,28 @@ def test_delete_issue_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_issue_field_headers_async(): +async def test_list_issues_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_issue(request) + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + await client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11131,35 +11203,35 @@ async def test_delete_issue_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_issue_flattened(): +def test_list_issues_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = contact_center_insights.ListIssuesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_issue( - name="name_value", + client.list_issues( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_issue_flattened_error(): +def test_list_issues_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11167,41 +11239,43 @@ def test_delete_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_issue_flattened_async(): +async def test_list_issues_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = contact_center_insights.ListIssuesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_issue( - name="name_value", + response = await client.list_issues( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_issue_flattened_error_async(): +async def test_list_issues_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11209,20 +11283,20 @@ async def test_delete_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + await client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CalculateIssueModelStatsRequest, + contact_center_insights.UpdateIssueRequest, dict, ], ) -def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): +def test_update_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11233,26 +11307,31 @@ def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - response = client.calculate_issue_model_stats(request) + call.return_value = resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) + response = client.update_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_calculate_issue_model_stats_empty_call(): +def test_update_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -11261,19 +11340,17 @@ def test_calculate_issue_model_stats_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.calculate_issue_model_stats() + client.update_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() + assert args[0] == contact_center_insights.UpdateIssueRequest() -def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field(): +def test_update_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -11284,26 +11361,20 @@ def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CalculateIssueModelStatsRequest( - issue_model="issue_model_value", - ) + request = contact_center_insights.UpdateIssueRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_issue_model_stats(request=request) + client.update_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest( - issue_model="issue_model_value", - ) + assert args[0] == contact_center_insights.UpdateIssueRequest() -def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): +def test_update_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11317,26 +11388,21 @@ def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.calculate_issue_model_stats - in client._transport._wrapped_methods - ) + assert client._transport.update_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.calculate_issue_model_stats - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc request = {} - client.calculate_issue_model_stats(request) + client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_issue_model_stats(request) + client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11344,7 +11410,7 @@ def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_calculate_issue_model_stats_empty_call_async(): +async def test_update_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -11353,21 +11419,24 @@ async def test_calculate_issue_model_stats_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() + resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) ) - response = await client.calculate_issue_model_stats() + response = await client.update_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() + assert args[0] == contact_center_insights.UpdateIssueRequest() @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( +async def test_update_issue_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11384,7 +11453,7 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.calculate_issue_model_stats + client._client._transport.update_issue in client._client._transport._wrapped_methods ) @@ -11392,16 +11461,16 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.calculate_issue_model_stats + client._client._transport.update_issue ] = mock_rpc request = {} - await client.calculate_issue_model_stats(request) + await client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.calculate_issue_model_stats(request) + await client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11409,9 +11478,9 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async( +async def test_update_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CalculateIssueModelStatsRequest, + request_type=contact_center_insights.UpdateIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11423,49 +11492,52 @@ async def test_calculate_issue_model_stats_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() + resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) ) - response = await client.calculate_issue_model_stats(request) + response = await client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async_from_dict(): - await test_calculate_issue_model_stats_async(request_type=dict) +async def test_update_issue_async_from_dict(): + await test_update_issue_async(request_type=dict) -def test_calculate_issue_model_stats_field_headers(): +def test_update_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() - request.issue_model = "issue_model_value" + request.issue.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - client.calculate_issue_model_stats(request) + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + call.return_value = resources.Issue() + client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11476,30 +11548,26 @@ def test_calculate_issue_model_stats_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue_model=issue_model_value", + "issue.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_calculate_issue_model_stats_field_headers_async(): +async def test_update_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() - request.issue_model = "issue_model_value" + request.issue.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() - ) - await client.calculate_issue_model_stats(request) + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) + await client.update_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -11510,37 +11578,39 @@ async def test_calculate_issue_model_stats_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue_model=issue_model_value", + "issue.name=name_value", ) in kw["metadata"] -def test_calculate_issue_model_stats_flattened(): +def test_update_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + call.return_value = resources.Issue() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.calculate_issue_model_stats( - issue_model="issue_model_value", + client.update_issue( + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].issue_model - mock_val = "issue_model_value" + arg = args[0].issue + mock_val = resources.Issue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_calculate_issue_model_stats_flattened_error(): +def test_update_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11548,45 +11618,46 @@ def test_calculate_issue_model_stats_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", + client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_calculate_issue_model_stats_flattened_async(): +async def test_update_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + call.return_value = resources.Issue() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.calculate_issue_model_stats( - issue_model="issue_model_value", + response = await client.update_issue( + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].issue_model - mock_val = "issue_model_value" + arg = args[0].issue + mock_val = resources.Issue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_calculate_issue_model_stats_flattened_error_async(): +async def test_update_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11594,20 +11665,21 @@ async def test_calculate_issue_model_stats_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", - ) + await client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreatePhraseMatcherRequest, + contact_center_insights.DeleteIssueRequest, dict, ], ) -def test_create_phrase_matcher(request_type, transport: str = "grpc"): +def test_delete_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11618,39 +11690,22 @@ def test_create_phrase_matcher(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - response = client.create_phrase_matcher(request) + call.return_value = None + response = client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None -def test_create_phrase_matcher_empty_call(): +def test_delete_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -11659,19 +11714,17 @@ def test_create_phrase_matcher_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_phrase_matcher() + client.delete_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() + assert args[0] == contact_center_insights.DeleteIssueRequest() -def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_delete_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -11682,26 +11735,24 @@ def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CreatePhraseMatcherRequest( - parent="parent_value", + request = contact_center_insights.DeleteIssueRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_phrase_matcher(request=request) + client.delete_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest( - parent="parent_value", + assert args[0] == contact_center_insights.DeleteIssueRequest( + name="name_value", ) -def test_create_phrase_matcher_use_cached_wrapped_rpc(): +def test_delete_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11715,26 +11766,21 @@ def test_create_phrase_matcher_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.delete_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc request = {} - client.create_phrase_matcher(request) + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_phrase_matcher(request) + client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11742,7 +11788,7 @@ def test_create_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_phrase_matcher_empty_call_async(): +async def test_delete_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -11751,29 +11797,17 @@ async def test_create_phrase_matcher_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - ) - response = await client.create_phrase_matcher() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() + assert args[0] == contact_center_insights.DeleteIssueRequest() @pytest.mark.asyncio -async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_delete_issue_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11790,7 +11824,7 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_phrase_matcher + client._client._transport.delete_issue in client._client._transport._wrapped_methods ) @@ -11798,16 +11832,16 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_phrase_matcher + client._client._transport.delete_issue ] = mock_rpc request = {} - await client.create_phrase_matcher(request) + await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.create_phrase_matcher(request) + await client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11815,9 +11849,9 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_phrase_matcher_async( +async def test_delete_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CreatePhraseMatcherRequest, + request_type=contact_center_insights.DeleteIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11829,62 +11863,41 @@ async def test_create_phrase_matcher_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - ) - response = await client.create_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None @pytest.mark.asyncio -async def test_create_phrase_matcher_async_from_dict(): - await test_create_phrase_matcher_async(request_type=dict) +async def test_delete_issue_async_from_dict(): + await test_delete_issue_async(request_type=dict) -def test_create_phrase_matcher_field_headers(): +def test_delete_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: - call.return_value = resources.PhraseMatcher() - client.create_phrase_matcher(request) + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + call.return_value = None + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11895,30 +11908,26 @@ def test_create_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_phrase_matcher_field_headers_async(): +async def test_delete_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() - ) - await client.create_phrase_matcher(request) + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -11929,41 +11938,35 @@ async def test_create_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_phrase_matcher_flattened(): +def test_delete_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_phrase_matcher( - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + client.delete_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_phrase_matcher_flattened_error(): +def test_delete_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11971,50 +11974,41 @@ def test_create_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_phrase_matcher_flattened_async(): +async def test_delete_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_phrase_matcher( - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + response = await client.delete_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_phrase_matcher_flattened_error_async(): +async def test_delete_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12022,21 +12016,20 @@ async def test_create_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + await client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetPhraseMatcherRequest, + contact_center_insights.CalculateIssueModelStatsRequest, dict, ], ) -def test_get_phrase_matcher(request_type, transport: str = "grpc"): +def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12048,38 +12041,25 @@ def test_get_phrase_matcher(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - response = client.get_phrase_matcher(request) + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + response = client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) -def test_get_phrase_matcher_empty_call(): +def test_calculate_issue_model_stats_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsClient( @@ -12089,18 +12069,18 @@ def test_get_phrase_matcher_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_phrase_matcher() + client.calculate_issue_model_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() -def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -12111,26 +12091,26 @@ def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetPhraseMatcherRequest( - name="name_value", + request = contact_center_insights.CalculateIssueModelStatsRequest( + issue_model="issue_model_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_phrase_matcher(request=request) + client.calculate_issue_model_stats(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest( - name="name_value", + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest( + issue_model="issue_model_value", ) -def test_get_phrase_matcher_use_cached_wrapped_rpc(): +def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12145,7 +12125,8 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_phrase_matcher in client._transport._wrapped_methods + client._transport.calculate_issue_model_stats + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12154,15 +12135,15 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_phrase_matcher + client._transport.calculate_issue_model_stats ] = mock_rpc request = {} - client.get_phrase_matcher(request) + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_phrase_matcher(request) + client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12170,7 +12151,7 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_phrase_matcher_empty_call_async(): +async def test_calculate_issue_model_stats_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -12180,28 +12161,20 @@ async def test_get_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + contact_center_insights.CalculateIssueModelStatsResponse() ) - response = await client.get_phrase_matcher() + response = await client.calculate_issue_model_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() @pytest.mark.asyncio -async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12218,7 +12191,7 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_phrase_matcher + client._client._transport.calculate_issue_model_stats in client._client._transport._wrapped_methods ) @@ -12226,16 +12199,16 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_phrase_matcher + client._client._transport.calculate_issue_model_stats ] = mock_rpc request = {} - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12243,9 +12216,9 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_phrase_matcher_async( +async def test_calculate_issue_model_stats_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetPhraseMatcherRequest, + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12258,61 +12231,48 @@ async def test_get_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + contact_center_insights.CalculateIssueModelStatsResponse() ) - response = await client.get_phrase_matcher(request) + response = await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) @pytest.mark.asyncio -async def test_get_phrase_matcher_async_from_dict(): - await test_get_phrase_matcher_async(request_type=dict) +async def test_calculate_issue_model_stats_async_from_dict(): + await test_calculate_issue_model_stats_async(request_type=dict) -def test_get_phrase_matcher_field_headers(): +def test_calculate_issue_model_stats_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() - request.name = "name_value" + request.issue_model = "issue_model_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: - call.return_value = resources.PhraseMatcher() - client.get_phrase_matcher(request) + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12323,30 +12283,30 @@ def test_get_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "issue_model=issue_model_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_phrase_matcher_field_headers_async(): +async def test_calculate_issue_model_stats_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() - request.name = "name_value" + request.issue_model = "issue_model_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.CalculateIssueModelStatsResponse() ) - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12357,37 +12317,37 @@ async def test_get_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "issue_model=issue_model_value", ) in kw["metadata"] -def test_get_phrase_matcher_flattened(): +def test_calculate_issue_model_stats_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_phrase_matcher( - name="name_value", + client.calculate_issue_model_stats( + issue_model="issue_model_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].issue_model + mock_val = "issue_model_value" assert arg == mock_val -def test_get_phrase_matcher_flattened_error(): +def test_calculate_issue_model_stats_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12395,45 +12355,45 @@ def test_get_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), - name="name_value", + client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) @pytest.mark.asyncio -async def test_get_phrase_matcher_flattened_async(): +async def test_calculate_issue_model_stats_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.CalculateIssueModelStatsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_phrase_matcher( - name="name_value", + response = await client.calculate_issue_model_stats( + issue_model="issue_model_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].issue_model + mock_val = "issue_model_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_phrase_matcher_flattened_error_async(): +async def test_calculate_issue_model_stats_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12441,20 +12401,20 @@ async def test_get_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), - name="name_value", + await client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListPhraseMatchersRequest, + contact_center_insights.CreatePhraseMatcherRequest, dict, ], ) -def test_list_phrase_matchers(request_type, transport: str = "grpc"): +def test_create_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12466,26 +12426,38 @@ def test_list_phrase_matchers(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + call.return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - response = client.list_phrase_matchers(request) + response = client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_empty_call(): +def test_create_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -12495,18 +12467,18 @@ def test_list_phrase_matchers_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_phrase_matchers() + client.create_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest() + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() -def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): +def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -12517,30 +12489,26 @@ def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListPhraseMatchersRequest( + request = contact_center_insights.CreatePhraseMatcherRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_phrase_matchers(request=request) + client.create_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest( + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) -def test_list_phrase_matchers_use_cached_wrapped_rpc(): +def test_create_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12555,7 +12523,8 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_phrase_matchers in client._transport._wrapped_methods + client._transport.create_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12564,15 +12533,15 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_phrase_matchers + client._transport.create_phrase_matcher ] = mock_rpc request = {} - client.list_phrase_matchers(request) + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_phrase_matchers(request) + client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12580,7 +12549,7 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_phrase_matchers_empty_call_async(): +async def test_create_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -12590,22 +12559,28 @@ async def test_list_phrase_matchers_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.list_phrase_matchers() + response = await client.create_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest() + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() @pytest.mark.asyncio -async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( +async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12622,7 +12597,7 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_phrase_matchers + client._client._transport.create_phrase_matcher in client._client._transport._wrapped_methods ) @@ -12630,16 +12605,16 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_phrase_matchers + client._client._transport.create_phrase_matcher ] = mock_rpc request = {} - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12647,9 +12622,9 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_phrase_matchers_async( +async def test_create_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListPhraseMatchersRequest, + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12662,49 +12637,61 @@ async def test_list_phrase_matchers_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.list_phrase_matchers(request) + response = await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPhraseMatchersAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_list_phrase_matchers_async_from_dict(): - await test_list_phrase_matchers_async(request_type=dict) +async def test_create_phrase_matcher_async_from_dict(): + await test_create_phrase_matcher_async(request_type=dict) -def test_list_phrase_matchers_field_headers(): +def test_create_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: - call.return_value = contact_center_insights.ListPhraseMatchersResponse() - client.list_phrase_matchers(request) + call.return_value = resources.PhraseMatcher() + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12720,25 +12707,25 @@ def test_list_phrase_matchers_field_headers(): @pytest.mark.asyncio -async def test_list_phrase_matchers_field_headers_async(): +async def test_create_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse() + resources.PhraseMatcher() ) - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12753,21 +12740,22 @@ async def test_list_phrase_matchers_field_headers_async(): ) in kw["metadata"] -def test_list_phrase_matchers_flattened(): +def test_create_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse() + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_phrase_matchers( + client.create_phrase_matcher( parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -12777,9 +12765,12 @@ def test_list_phrase_matchers_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val -def test_list_phrase_matchers_flattened_error(): +def test_create_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12787,32 +12778,34 @@ def test_list_phrase_matchers_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), + client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) @pytest.mark.asyncio -async def test_list_phrase_matchers_flattened_async(): +async def test_create_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse() + call.return_value = resources.PhraseMatcher() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse() + resources.PhraseMatcher() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_phrase_matchers( + response = await client.create_phrase_matcher( parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -12822,10 +12815,13 @@ async def test_list_phrase_matchers_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_list_phrase_matchers_flattened_error_async(): +async def test_create_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12833,301 +12829,115 @@ async def test_list_phrase_matchers_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), + await client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_list_phrase_matchers_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetPhraseMatcherRequest, + dict, + ], +) +def test_get_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - pager = client.list_phrase_matchers(request={}, retry=retry, timeout=timeout) + response = client.get_phrase_matcher(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_pages(transport_name: str = "grpc"): +def test_get_phrase_matcher_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_phrase_matchers(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.get_phrase_matcher() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetPhraseMatcherRequest() -@pytest.mark.asyncio -async def test_list_phrase_matchers_async_pager(): - client = ContactCenterInsightsAsyncClient( +def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_phrase_matchers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_phrase_matchers( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in responses) - - -@pytest.mark.asyncio -async def test_list_phrase_matchers_async_pages(): - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_phrase_matchers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_phrase_matchers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.DeletePhraseMatcherRequest, - dict, - ], -) -def test_delete_phrase_matcher(request_type, transport: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_phrase_matcher(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeletePhraseMatcherRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_phrase_matcher_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_phrase_matcher() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() - - -def test_delete_phrase_matcher_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeletePhraseMatcherRequest( + request = contact_center_insights.GetPhraseMatcherRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_phrase_matcher(request=request) + client.get_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest( + assert args[0] == contact_center_insights.GetPhraseMatcherRequest( name="name_value", ) -def test_delete_phrase_matcher_use_cached_wrapped_rpc(): +def test_get_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13142,8 +12952,7 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_phrase_matcher - in client._transport._wrapped_methods + client._transport.get_phrase_matcher in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13152,15 +12961,15 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_phrase_matcher + client._transport.get_phrase_matcher ] = mock_rpc request = {} - client.delete_phrase_matcher(request) + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_phrase_matcher(request) + client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13168,7 +12977,7 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_empty_call_async(): +async def test_get_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -13178,18 +12987,28 @@ async def test_delete_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_phrase_matcher() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) + ) + response = await client.get_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() + assert args[0] == contact_center_insights.GetPhraseMatcherRequest() @pytest.mark.asyncio -async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13206,7 +13025,7 @@ async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_phrase_matcher + client._client._transport.get_phrase_matcher in client._client._transport._wrapped_methods ) @@ -13214,16 +13033,16 @@ async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_phrase_matcher + client._client._transport.get_phrase_matcher ] = mock_rpc request = {} - await client.delete_phrase_matcher(request) + await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.delete_phrase_matcher(request) + await client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13231,9 +13050,9 @@ async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_phrase_matcher_async( +async def test_get_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeletePhraseMatcherRequest, + request_type=contact_center_insights.GetPhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13246,44 +13065,61 @@ async def test_delete_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) + ) + response = await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_delete_phrase_matcher_async_from_dict(): - await test_delete_phrase_matcher_async(request_type=dict) +async def test_get_phrase_matcher_async_from_dict(): + await test_get_phrase_matcher_async(request_type=dict) -def test_delete_phrase_matcher_field_headers(): +def test_get_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - call.return_value = None - client.delete_phrase_matcher(request) + call.return_value = resources.PhraseMatcher() + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13299,23 +13135,25 @@ def test_delete_phrase_matcher_field_headers(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_field_headers_async(): +async def test_get_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) + await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13330,20 +13168,20 @@ async def test_delete_phrase_matcher_field_headers_async(): ) in kw["metadata"] -def test_delete_phrase_matcher_flattened(): +def test_get_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_phrase_matcher( + client.get_phrase_matcher( name="name_value", ) @@ -13356,7 +13194,7 @@ def test_delete_phrase_matcher_flattened(): assert arg == mock_val -def test_delete_phrase_matcher_flattened_error(): +def test_get_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13364,29 +13202,31 @@ def test_delete_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), + client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_phrase_matcher_flattened_async(): +async def test_get_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.PhraseMatcher() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_phrase_matcher( + response = await client.get_phrase_matcher( name="name_value", ) @@ -13400,7 +13240,7 @@ async def test_delete_phrase_matcher_flattened_async(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_flattened_error_async(): +async def test_get_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13408,8 +13248,8 @@ async def test_delete_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), + await client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), name="name_value", ) @@ -13417,11 +13257,11 @@ async def test_delete_phrase_matcher_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdatePhraseMatcherRequest, + contact_center_insights.ListPhraseMatchersRequest, dict, ], ) -def test_update_phrase_matcher(request_type, transport: str = "grpc"): +def test_list_phrase_matchers(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13433,38 +13273,26 @@ def test_update_phrase_matcher(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + call.return_value = contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) - response = client.update_phrase_matcher(request) + response = client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersPager) + assert response.next_page_token == "next_page_token_value" -def test_update_phrase_matcher_empty_call(): +def test_list_phrase_matchers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -13474,18 +13302,18 @@ def test_update_phrase_matcher_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_phrase_matcher() + client.list_phrase_matchers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest() -def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -13496,22 +13324,30 @@ def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_phrase_matcher(request=request) + client.list_phrase_matchers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) -def test_update_phrase_matcher_use_cached_wrapped_rpc(): +def test_list_phrase_matchers_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13526,8 +13362,7 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_phrase_matcher - in client._transport._wrapped_methods + client._transport.list_phrase_matchers in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13536,15 +13371,15 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_phrase_matcher + client._transport.list_phrase_matchers ] = mock_rpc request = {} - client.update_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13552,7 +13387,7 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_phrase_matcher_empty_call_async(): +async def test_list_phrase_matchers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -13562,28 +13397,22 @@ async def test_update_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_phrase_matcher() + response = await client.list_phrase_matchers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest() @pytest.mark.asyncio -async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13600,7 +13429,7 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_phrase_matcher + client._client._transport.list_phrase_matchers in client._client._transport._wrapped_methods ) @@ -13608,16 +13437,16 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_phrase_matcher + client._client._transport.list_phrase_matchers ] = mock_rpc request = {} - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13625,9 +13454,9 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_phrase_matcher_async( +async def test_list_phrase_matchers_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdatePhraseMatcherRequest, + request_type=contact_center_insights.ListPhraseMatchersRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13640,61 +13469,49 @@ async def test_update_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_phrase_matcher(request) + response = await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_phrase_matcher_async_from_dict(): - await test_update_phrase_matcher_async(request_type=dict) +async def test_list_phrase_matchers_async_from_dict(): + await test_list_phrase_matchers_async(request_type=dict) -def test_update_phrase_matcher_field_headers(): +def test_list_phrase_matchers_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() - request.phrase_matcher.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: - call.return_value = resources.PhraseMatcher() - client.update_phrase_matcher(request) + call.return_value = contact_center_insights.ListPhraseMatchersResponse() + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13705,30 +13522,30 @@ def test_update_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "phrase_matcher.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_phrase_matcher_field_headers_async(): +async def test_list_phrase_matchers_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() - request.phrase_matcher.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.ListPhraseMatchersResponse() ) - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13739,41 +13556,37 @@ async def test_update_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "phrase_matcher.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_phrase_matcher_flattened(): +def test_list_phrase_matchers_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.ListPhraseMatchersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_phrase_matcher( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_phrase_matchers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_phrase_matcher_flattened_error(): +def test_list_phrase_matchers_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13781,50 +13594,45 @@ def test_update_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_phrase_matcher_flattened_async(): +async def test_list_phrase_matchers_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.ListPhraseMatchersResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.ListPhraseMatchersResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_phrase_matcher( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_phrase_matchers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_phrase_matcher_flattened_error_async(): +async def test_list_phrase_matchers_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13832,21 +13640,222 @@ async def test_update_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CalculateStatsRequest, - dict, - ], +def test_list_phrase_matchers_pager(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_phrase_matchers(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in results) + + +def test_list_phrase_matchers_pages(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + pages = list(client.list_phrase_matchers(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_phrase_matchers_async_pager(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_phrase_matchers( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in responses) + + +@pytest.mark.asyncio +async def test_list_phrase_matchers_async_pages(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_phrase_matchers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeletePhraseMatcherRequest, + dict, + ], ) -def test_calculate_stats(request_type, transport: str = "grpc"): +def test_delete_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13857,27 +13866,24 @@ def test_calculate_stats(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - response = client.calculate_stats(request) + call.return_value = None + response = client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert response is None -def test_calculate_stats_empty_call(): +def test_delete_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -13886,17 +13892,19 @@ def test_calculate_stats_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.calculate_stats() + client.delete_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest() + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() -def test_calculate_stats_non_empty_request_with_auto_populated_field(): +def test_delete_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -13907,26 +13915,26 @@ def test_calculate_stats_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CalculateStatsRequest( - location="location_value", - filter="filter_value", + request = contact_center_insights.DeletePhraseMatcherRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_stats(request=request) + client.delete_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest( - location="location_value", - filter="filter_value", + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest( + name="name_value", ) -def test_calculate_stats_use_cached_wrapped_rpc(): +def test_delete_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13940,21 +13948,26 @@ def test_calculate_stats_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.calculate_stats in client._transport._wrapped_methods + assert ( + client._transport.delete_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_phrase_matcher + ] = mock_rpc request = {} - client.calculate_stats(request) + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_stats(request) + client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13962,7 +13975,7 @@ def test_calculate_stats_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_calculate_stats_empty_call_async(): +async def test_delete_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -13971,22 +13984,19 @@ async def test_calculate_stats_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - ) - response = await client.calculate_stats() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest() + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() @pytest.mark.asyncio -async def test_calculate_stats_async_use_cached_wrapped_rpc( +async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14003,7 +14013,7 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.calculate_stats + client._client._transport.delete_phrase_matcher in client._client._transport._wrapped_methods ) @@ -14011,16 +14021,16 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.calculate_stats + client._client._transport.delete_phrase_matcher ] = mock_rpc request = {} - await client.calculate_stats(request) + await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.calculate_stats(request) + await client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14028,9 +14038,9 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_calculate_stats_async( +async def test_delete_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CalculateStatsRequest, + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14042,48 +14052,45 @@ async def test_calculate_stats_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - ) - response = await client.calculate_stats(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert response is None @pytest.mark.asyncio -async def test_calculate_stats_async_from_dict(): - await test_calculate_stats_async(request_type=dict) +async def test_delete_phrase_matcher_async_from_dict(): + await test_delete_phrase_matcher_async(request_type=dict) -def test_calculate_stats_field_headers(): +def test_delete_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value = contact_center_insights.CalculateStatsResponse() - client.calculate_stats(request) + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value = None + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14094,28 +14101,28 @@ def test_calculate_stats_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_calculate_stats_field_headers_async(): +async def test_delete_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse() - ) - await client.calculate_stats(request) + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14126,35 +14133,37 @@ async def test_calculate_stats_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] -def test_calculate_stats_flattened(): +def test_delete_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = contact_center_insights.CalculateStatsResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.calculate_stats( - location="location_value", + client.delete_phrase_matcher( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_calculate_stats_flattened_error(): +def test_delete_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14162,43 +14171,43 @@ def test_calculate_stats_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_calculate_stats_flattened_async(): +async def test_delete_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateStatsResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.calculate_stats( - location="location_value", + response = await client.delete_phrase_matcher( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_calculate_stats_flattened_error_async(): +async def test_delete_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14206,20 +14215,20 @@ async def test_calculate_stats_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + await client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetSettingsRequest, + contact_center_insights.UpdatePhraseMatcherRequest, dict, ], ) -def test_get_settings(request_type, transport: str = "grpc"): +def test_update_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14230,27 +14239,39 @@ def test_get_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings( + call.return_value = resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - response = client.get_settings(request) + response = client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.PhraseMatcher) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_get_settings_empty_call(): +def test_update_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -14259,17 +14280,19 @@ def test_get_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_settings() + client.update_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest() + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() -def test_get_settings_non_empty_request_with_auto_populated_field(): +def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -14280,24 +14303,22 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetSettingsRequest( - name="name_value", - ) + request = contact_center_insights.UpdatePhraseMatcherRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_settings(request=request) + client.update_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest( - name="name_value", - ) + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() -def test_get_settings_use_cached_wrapped_rpc(): +def test_update_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14311,21 +14332,26 @@ def test_get_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_settings in client._transport._wrapped_methods + assert ( + client._transport.update_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_phrase_matcher + ] = mock_rpc request = {} - client.get_settings(request) + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_settings(request) + client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14333,7 +14359,7 @@ def test_get_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_settings_empty_call_async(): +async def test_update_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -14342,22 +14368,29 @@ async def test_get_settings_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( + resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.get_settings() + response = await client.update_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest() + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() @pytest.mark.asyncio -async def test_get_settings_async_use_cached_wrapped_rpc( +async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14374,7 +14407,7 @@ async def test_get_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_settings + client._client._transport.update_phrase_matcher in client._client._transport._wrapped_methods ) @@ -14382,16 +14415,16 @@ async def test_get_settings_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_settings + client._client._transport.update_phrase_matcher ] = mock_rpc request = {} - await client.get_settings(request) + await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_settings(request) + await client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14399,9 +14432,9 @@ async def test_get_settings_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_settings_async( +async def test_update_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetSettingsRequest, + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14413,48 +14446,62 @@ async def test_get_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( + resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.get_settings(request) + response = await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.PhraseMatcher) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_get_settings_async_from_dict(): - await test_get_settings_async(request_type=dict) +async def test_update_phrase_matcher_async_from_dict(): + await test_update_phrase_matcher_async(request_type=dict) -def test_get_settings_field_headers(): +def test_update_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() - request.name = "name_value" + request.phrase_matcher.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = resources.Settings() - client.get_settings(request) + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: + call.return_value = resources.PhraseMatcher() + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14465,26 +14512,30 @@ def test_get_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "phrase_matcher.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_settings_field_headers_async(): +async def test_update_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() - request.name = "name_value" + request.phrase_matcher.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) - await client.get_settings(request) + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) + await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -14495,35 +14546,41 @@ async def test_get_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "phrase_matcher.name=name_value", ) in kw["metadata"] -def test_get_settings_flattened(): +def test_update_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_settings( - name="name_value", + client.update_phrase_matcher( + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_settings_flattened_error(): +def test_update_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14531,41 +14588,50 @@ def test_get_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_settings( - contact_center_insights.GetSettingsRequest(), - name="name_value", + client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_settings_flattened_async(): +async def test_update_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = resources.PhraseMatcher() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_settings( - name="name_value", + response = await client.update_phrase_matcher( + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_settings_flattened_error_async(): +async def test_update_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14573,20 +14639,21 @@ async def test_get_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_settings( - contact_center_insights.GetSettingsRequest(), - name="name_value", + await client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateSettingsRequest, + contact_center_insights.CalculateStatsRequest, dict, ], ) -def test_update_settings(request_type, transport: str = "grpc"): +def test_calculate_stats(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14597,27 +14664,27 @@ def test_update_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings( - name="name_value", - language_code="language_code_value", + call.return_value = contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) - response = client.update_settings(request) + response = client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 -def test_update_settings_empty_call(): +def test_calculate_stats_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -14626,17 +14693,17 @@ def test_update_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_settings() + client.calculate_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest() -def test_update_settings_non_empty_request_with_auto_populated_field(): +def test_calculate_stats_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -14647,20 +14714,26 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest( + location="location_value", + filter="filter_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_settings(request=request) + client.calculate_stats(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest( + location="location_value", + filter="filter_value", + ) -def test_update_settings_use_cached_wrapped_rpc(): +def test_calculate_stats_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14674,21 +14747,21 @@ def test_update_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_settings in client._transport._wrapped_methods + assert client._transport.calculate_stats in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc request = {} - client.update_settings(request) + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_settings(request) + client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14696,7 +14769,7 @@ def test_update_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_settings_empty_call_async(): +async def test_calculate_stats_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -14705,22 +14778,22 @@ async def test_update_settings_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( - name="name_value", - language_code="language_code_value", + contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) ) - response = await client.update_settings() + response = await client.calculate_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest() @pytest.mark.asyncio -async def test_update_settings_async_use_cached_wrapped_rpc( +async def test_calculate_stats_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14737,7 +14810,7 @@ async def test_update_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_settings + client._client._transport.calculate_stats in client._client._transport._wrapped_methods ) @@ -14745,16 +14818,16 @@ async def test_update_settings_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_settings + client._client._transport.calculate_stats ] = mock_rpc request = {} - await client.update_settings(request) + await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_settings(request) + await client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14762,9 +14835,9 @@ async def test_update_settings_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_settings_async( +async def test_calculate_stats_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateSettingsRequest, + request_type=contact_center_insights.CalculateStatsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14776,48 +14849,48 @@ async def test_update_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( - name="name_value", - language_code="language_code_value", + contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) ) - response = await client.update_settings(request) + response = await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 @pytest.mark.asyncio -async def test_update_settings_async_from_dict(): - await test_update_settings_async(request_type=dict) +async def test_calculate_stats_async_from_dict(): + await test_calculate_stats_async(request_type=dict) -def test_update_settings_field_headers(): +def test_calculate_stats_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() - request.settings.name = "name_value" + request.location = "location_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value = resources.Settings() - client.update_settings(request) + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + call.return_value = contact_center_insights.CalculateStatsResponse() + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14828,26 +14901,28 @@ def test_update_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "settings.name=name_value", + "location=location_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_settings_field_headers_async(): +async def test_calculate_stats_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() - request.settings.name = "name_value" + request.location = "location_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) - await client.update_settings(request) + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.CalculateStatsResponse() + ) + await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14858,39 +14933,35 @@ async def test_update_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "settings.name=name_value", + "location=location_value", ) in kw["metadata"] -def test_update_settings_flattened(): +def test_calculate_stats_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = contact_center_insights.CalculateStatsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_settings( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_stats( + location="location_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = resources.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].location + mock_val = "location_value" assert arg == mock_val -def test_update_settings_flattened_error(): +def test_calculate_stats_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14898,46 +14969,43 @@ def test_update_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", ) @pytest.mark.asyncio -async def test_update_settings_flattened_async(): +async def test_calculate_stats_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = contact_center_insights.CalculateStatsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.CalculateStatsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_settings( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.calculate_stats( + location="location_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = resources.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].location + mock_val = "location_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_settings_flattened_error_async(): +async def test_calculate_stats_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14945,21 +15013,20 @@ async def test_update_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateViewRequest, + contact_center_insights.GetSettingsRequest, dict, ], ) -def test_create_view(request_type, transport: str = "grpc"): +def test_get_settings(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14970,29 +15037,27 @@ def test_create_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View( + call.return_value = resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) - response = client.create_view(request) + response = client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" -def test_create_view_empty_call(): +def test_get_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15001,17 +15066,17 @@ def test_create_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_view() + client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest() + assert args[0] == contact_center_insights.GetSettingsRequest() -def test_create_view_non_empty_request_with_auto_populated_field(): +def test_get_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15022,24 +15087,24 @@ def test_create_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CreateViewRequest( - parent="parent_value", + request = contact_center_insights.GetSettingsRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_view(request=request) + client.get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest( - parent="parent_value", + assert args[0] == contact_center_insights.GetSettingsRequest( + name="name_value", ) -def test_create_view_use_cached_wrapped_rpc(): +def test_get_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15053,21 +15118,21 @@ def test_create_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_view in client._transport._wrapped_methods + assert client._transport.get_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_view] = mock_rpc + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} - client.create_view(request) + client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_view(request) + client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15075,7 +15140,7 @@ def test_create_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_view_empty_call_async(): +async def test_get_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -15084,23 +15149,22 @@ async def test_create_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.create_view() + response = await client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest() + assert args[0] == contact_center_insights.GetSettingsRequest() @pytest.mark.asyncio -async def test_create_view_async_use_cached_wrapped_rpc( +async def test_get_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15117,7 +15181,7 @@ async def test_create_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_view + client._client._transport.get_settings in client._client._transport._wrapped_methods ) @@ -15125,16 +15189,16 @@ async def test_create_view_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_view + client._client._transport.get_settings ] = mock_rpc request = {} - await client.create_view(request) + await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.create_view(request) + await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15142,9 +15206,9 @@ async def test_create_view_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_view_async( +async def test_get_settings_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CreateViewRequest, + request_type=contact_center_insights.GetSettingsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15156,50 +15220,48 @@ async def test_create_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.create_view(request) + response = await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" @pytest.mark.asyncio -async def test_create_view_async_from_dict(): - await test_create_view_async(request_type=dict) +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) -def test_create_view_field_headers(): +def test_get_settings_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = resources.View() - client.create_view(request) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = resources.Settings() + client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15210,26 +15272,26 @@ def test_create_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_view_field_headers_async(): +async def test_get_settings_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.create_view(request) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15240,39 +15302,35 @@ async def test_create_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_view_flattened(): +def test_get_settings_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_view( - parent="parent_value", - view=resources.View(name="name_value"), + client.get_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].view - mock_val = resources.View(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_view_flattened_error(): +def test_get_settings_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15280,46 +15338,41 @@ def test_create_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_view( - contact_center_insights.CreateViewRequest(), - parent="parent_value", - view=resources.View(name="name_value"), + client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_view_flattened_async(): +async def test_get_settings_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_view( - parent="parent_value", - view=resources.View(name="name_value"), + response = await client.get_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].view - mock_val = resources.View(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_view_flattened_error_async(): +async def test_get_settings_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15327,21 +15380,20 @@ async def test_create_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_view( - contact_center_insights.CreateViewRequest(), - parent="parent_value", - view=resources.View(name="name_value"), + await client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetViewRequest, + contact_center_insights.UpdateSettingsRequest, dict, ], ) -def test_get_view(request_type, transport: str = "grpc"): +def test_update_settings(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15352,29 +15404,27 @@ def test_get_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View( + call.return_value = resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) - response = client.get_view(request) + response = client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" -def test_get_view_empty_call(): +def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15383,17 +15433,17 @@ def test_get_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_view() + client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest() + assert args[0] == contact_center_insights.UpdateSettingsRequest() -def test_get_view_non_empty_request_with_auto_populated_field(): +def test_update_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15404,24 +15454,20 @@ def test_get_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = contact_center_insights.GetViewRequest( - name="name_value", - ) + request = contact_center_insights.UpdateSettingsRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_view(request=request) + client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest( - name="name_value", - ) + assert args[0] == contact_center_insights.UpdateSettingsRequest() -def test_get_view_use_cached_wrapped_rpc(): +def test_update_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15435,21 +15481,21 @@ def test_get_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_view in client._transport._wrapped_methods + assert client._transport.update_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} - client.get_view(request) + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_view(request) + client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15457,7 +15503,7 @@ def test_get_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_view_empty_call_async(): +async def test_update_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -15466,23 +15512,24 @@ async def test_get_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.get_view() + response = await client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest() + assert args[0] == contact_center_insights.UpdateSettingsRequest() @pytest.mark.asyncio -async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -15497,7 +15544,7 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_view + client._client._transport.update_settings in client._client._transport._wrapped_methods ) @@ -15505,16 +15552,16 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_view + client._client._transport.update_settings ] = mock_rpc request = {} - await client.get_view(request) + await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_view(request) + await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15522,8 +15569,9 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_get_view_async( - transport: str = "grpc_asyncio", request_type=contact_center_insights.GetViewRequest +async def test_update_settings_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.UpdateSettingsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15535,50 +15583,48 @@ async def test_get_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.get_view(request) + response = await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" @pytest.mark.asyncio -async def test_get_view_async_from_dict(): - await test_get_view_async(request_type=dict) +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) -def test_get_view_field_headers(): +def test_update_settings_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() - request.name = "name_value" + request.settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = resources.View() - client.get_view(request) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = resources.Settings() + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15589,26 +15635,26 @@ def test_get_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "settings.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_view_field_headers_async(): +async def test_update_settings_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() - request.name = "name_value" + request.settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.get_view(request) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15619,35 +15665,39 @@ async def test_get_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "settings.name=name_value", ) in kw["metadata"] -def test_get_view_flattened(): +def test_update_settings_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_view( - name="name_value", + client.update_settings( + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = resources.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_view_flattened_error(): +def test_update_settings_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15655,41 +15705,46 @@ def test_get_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_view( - contact_center_insights.GetViewRequest(), - name="name_value", + client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_view_flattened_async(): +async def test_update_settings_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_view( - name="name_value", + response = await client.update_settings( + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = resources.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_view_flattened_error_async(): +async def test_update_settings_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15697,20 +15752,21 @@ async def test_get_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_view( - contact_center_insights.GetViewRequest(), - name="name_value", + await client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListViewsRequest, + contact_center_insights.GetEncryptionSpecRequest, dict, ], ) -def test_list_views(request_type, transport: str = "grpc"): +def test_get_encryption_spec(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15721,25 +15777,29 @@ def test_list_views(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + call.return_value = resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) - response = client.list_views(request) + response = client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EncryptionSpec) + assert response.name == "name_value" + assert response.kms_key == "kms_key_value" -def test_list_views_empty_call(): +def test_get_encryption_spec_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15748,17 +15808,19 @@ def test_list_views_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_views() + client.get_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest() + assert args[0] == contact_center_insights.GetEncryptionSpecRequest() -def test_list_views_non_empty_request_with_auto_populated_field(): +def test_get_encryption_spec_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15769,26 +15831,26 @@ def test_list_views_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = contact_center_insights.ListViewsRequest( - parent="parent_value", - page_token="page_token_value", + request = contact_center_insights.GetEncryptionSpecRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_views(request=request) + client.get_encryption_spec(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == contact_center_insights.GetEncryptionSpecRequest( + name="name_value", ) -def test_list_views_use_cached_wrapped_rpc(): +def test_get_encryption_spec_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15802,21 +15864,25 @@ def test_list_views_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_views in client._transport._wrapped_methods + assert ( + client._transport.get_encryption_spec in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_encryption_spec + ] = mock_rpc request = {} - client.list_views(request) + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_views(request) + client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15824,7 +15890,7 @@ def test_list_views_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_views_empty_call_async(): +async def test_get_encryption_spec_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -15833,21 +15899,26 @@ async def test_list_views_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) ) - response = await client.list_views() + response = await client.get_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest() + assert args[0] == contact_center_insights.GetEncryptionSpecRequest() @pytest.mark.asyncio -async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_encryption_spec_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -15862,7 +15933,7 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_views + client._client._transport.get_encryption_spec in client._client._transport._wrapped_methods ) @@ -15870,16 +15941,16 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_views + client._client._transport.get_encryption_spec ] = mock_rpc request = {} - await client.list_views(request) + await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_views(request) + await client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15887,9 +15958,9 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_list_views_async( +async def test_get_encryption_spec_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListViewsRequest, + request_type=contact_center_insights.GetEncryptionSpecRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15901,46 +15972,52 @@ async def test_list_views_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) ) - response = await client.list_views(request) + response = await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EncryptionSpec) + assert response.name == "name_value" + assert response.kms_key == "kms_key_value" @pytest.mark.asyncio -async def test_list_views_async_from_dict(): - await test_list_views_async(request_type=dict) +async def test_get_encryption_spec_async_from_dict(): + await test_get_encryption_spec_async(request_type=dict) -def test_list_views_field_headers(): +def test_get_encryption_spec_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - call.return_value = contact_center_insights.ListViewsResponse() - client.list_views(request) + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: + call.return_value = resources.EncryptionSpec() + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15951,28 +16028,30 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_views_field_headers_async(): +async def test_get_encryption_spec_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse() + resources.EncryptionSpec() ) - await client.list_views(request) + await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15983,35 +16062,37 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_views_flattened(): +def test_get_encryption_spec_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = contact_center_insights.ListViewsResponse() + call.return_value = resources.EncryptionSpec() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_views( - parent="parent_value", + client.get_encryption_spec( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_views_flattened_error(): +def test_get_encryption_spec_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16019,43 +16100,45 @@ def test_list_views_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_views( - contact_center_insights.ListViewsRequest(), - parent="parent_value", + client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_views_flattened_async(): +async def test_get_encryption_spec_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse() + call.return_value = resources.EncryptionSpec() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse() + resources.EncryptionSpec() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_views( - parent="parent_value", + response = await client.get_encryption_spec( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_views_flattened_error_async(): +async def test_get_encryption_spec_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16063,351 +16146,164 @@ async def test_list_views_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_views( - contact_center_insights.ListViewsRequest(), - parent="parent_value", + await client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), + name="name_value", ) -def test_list_views_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.InitializeEncryptionSpecRequest, + dict, + ], +) +def test_initialize_encryption_spec(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_views(request={}, retry=retry, timeout=timeout) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.initialize_encryption_spec(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.InitializeEncryptionSpecRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.View) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_views_pages(transport_name: str = "grpc"): +def test_initialize_encryption_spec_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_views(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.initialize_encryption_spec() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() -@pytest.mark.asyncio -async def test_list_views_async_pager(): - client = ContactCenterInsightsAsyncClient( +def test_initialize_encryption_spec_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.InitializeEncryptionSpecRequest() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + type(client.transport.initialize_encryption_spec), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - async_pager = await client.list_views( - request={}, + client.initialize_encryption_spec(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() + + +def test_initialize_encryption_spec_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.View) for i in responses) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.initialize_encryption_spec + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.initialize_encryption_spec + ] = mock_rpc + request = {} + client.initialize_encryption_spec(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_encryption_spec(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_views_async_pages(): +async def test_initialize_encryption_spec_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + type(client.transport.initialize_encryption_spec), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_views(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateViewRequest, - dict, - ], -) -def test_update_view(request_type, transport: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) - response = client.update_view(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateViewRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" - - -def test_update_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() - - -def test_update_view_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = contact_center_insights.UpdateViewRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_view(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() - - -def test_update_view_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_view in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_view] = mock_rpc - request = {} - client.update_view(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_view(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_view() + response = await client.initialize_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() @pytest.mark.asyncio -async def test_update_view_async_use_cached_wrapped_rpc( +async def test_initialize_encryption_spec_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16424,7 +16320,7 @@ async def test_update_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_view + client._client._transport.initialize_encryption_spec in client._client._transport._wrapped_methods ) @@ -16432,16 +16328,21 @@ async def test_update_view_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_view + client._client._transport.initialize_encryption_spec ] = mock_rpc request = {} - await client.update_view(request) + await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_view(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16449,9 +16350,9 @@ async def test_update_view_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_view_async( +async def test_initialize_encryption_spec_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateViewRequest, + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16463,50 +16364,47 @@ async def test_update_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_view(request) + response = await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_view_async_from_dict(): - await test_update_view_async(request_type=dict) +async def test_initialize_encryption_spec_async_from_dict(): + await test_initialize_encryption_spec_async(request_type=dict) -def test_update_view_field_headers(): +def test_initialize_encryption_spec_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() - request.view.name = "name_value" + request.encryption_spec.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = resources.View() - client.update_view(request) + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16517,26 +16415,30 @@ def test_update_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "view.name=name_value", + "encryption_spec.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_view_field_headers_async(): +async def test_initialize_encryption_spec_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() - request.view.name = "name_value" + request.encryption_spec.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.update_view(request) + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16547,39 +16449,37 @@ async def test_update_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "view.name=name_value", + "encryption_spec.name=name_value", ) in kw["metadata"] -def test_update_view_flattened(): +def test_initialize_encryption_spec_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_view( - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + encryption_spec=resources.EncryptionSpec(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].view - mock_val = resources.View(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].encryption_spec + mock_val = resources.EncryptionSpec(name="name_value") assert arg == mock_val -def test_update_view_flattened_error(): +def test_initialize_encryption_spec_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16587,46 +16487,45 @@ def test_update_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_view( - contact_center_insights.UpdateViewRequest(), - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) @pytest.mark.asyncio -async def test_update_view_flattened_async(): +async def test_initialize_encryption_spec_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_view( - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.initialize_encryption_spec( + encryption_spec=resources.EncryptionSpec(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].view - mock_val = resources.View(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].encryption_spec + mock_val = resources.EncryptionSpec(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_update_view_flattened_error_async(): +async def test_initialize_encryption_spec_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16634,21 +16533,20 @@ async def test_update_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_view( - contact_center_insights.UpdateViewRequest(), - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteViewRequest, + contact_center_insights.CreateViewRequest, dict, ], ) -def test_delete_view(request_type, transport: str = "grpc"): +def test_create_view(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16659,22 +16557,29 @@ def test_delete_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_view(request) + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" -def test_delete_view_empty_call(): +def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -16683,17 +16588,17 @@ def test_delete_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_view() + client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest() + assert args[0] == contact_center_insights.CreateViewRequest() -def test_delete_view_non_empty_request_with_auto_populated_field(): +def test_create_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -16704,24 +16609,24 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeleteViewRequest( - name="name_value", + request = contact_center_insights.CreateViewRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_view(request=request) + client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest( - name="name_value", + assert args[0] == contact_center_insights.CreateViewRequest( + parent="parent_value", ) -def test_delete_view_use_cached_wrapped_rpc(): +def test_create_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16735,21 +16640,21 @@ def test_delete_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_view in client._transport._wrapped_methods + assert client._transport.create_view in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc request = {} - client.delete_view(request) + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_view(request) + client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16757,7 +16662,7 @@ def test_delete_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_view_empty_call_async(): +async def test_create_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -16766,17 +16671,23 @@ async def test_delete_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest() + assert args[0] == contact_center_insights.CreateViewRequest() @pytest.mark.asyncio -async def test_delete_view_async_use_cached_wrapped_rpc( +async def test_create_view_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16793,7 +16704,7 @@ async def test_delete_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_view + client._client._transport.create_view in client._client._transport._wrapped_methods ) @@ -16801,16 +16712,16 @@ async def test_delete_view_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_view + client._client._transport.create_view ] = mock_rpc request = {} - await client.delete_view(request) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.delete_view(request) + await client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16818,9 +16729,9 @@ async def test_delete_view_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_view_async( +async def test_create_view_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeleteViewRequest, + request_type=contact_center_insights.CreateViewRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16832,41 +16743,50 @@ async def test_delete_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" @pytest.mark.asyncio -async def test_delete_view_async_from_dict(): - await test_delete_view_async(request_type=dict) +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) -def test_delete_view_field_headers(): +def test_create_view_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = None - client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = resources.View() + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16877,26 +16797,26 @@ def test_delete_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_view_field_headers_async(): +async def test_create_view_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16907,35 +16827,39 @@ async def test_delete_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_view_flattened(): +def test_create_view_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.View() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_view( - name="name_value", + client.create_view( + parent="parent_value", + view=resources.View(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].view + mock_val = resources.View(name="name_value") assert arg == mock_val -def test_delete_view_flattened_error(): +def test_create_view_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16943,41 +16867,46 @@ def test_delete_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_view( - contact_center_insights.DeleteViewRequest(), - name="name_value", + client.create_view( + contact_center_insights.CreateViewRequest(), + parent="parent_value", + view=resources.View(name="name_value"), ) @pytest.mark.asyncio -async def test_delete_view_flattened_async(): +async def test_create_view_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.View() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_view( - name="name_value", + response = await client.create_view( + parent="parent_value", + view=resources.View(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].view + mock_val = resources.View(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_delete_view_flattened_error_async(): +async def test_create_view_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16985,72 +16914,2599 @@ async def test_delete_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_view( - contact_center_insights.DeleteViewRequest(), - name="name_value", + await client.create_view( + contact_center_insights.CreateViewRequest(), + parent="parent_value", + view=resources.View(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateConversationRequest, + contact_center_insights.GetViewRequest, dict, ], ) -def test_create_conversation_rest(request_type): +def test_get_view(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["conversation"] = { - "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, - "expire_time": {"seconds": 751, "nanos": 543}, - "ttl": {"seconds": 751, "nanos": 543}, - "name": "name_value", - "data_source": { - "gcs_source": { - "audio_uri": "audio_uri_value", - "transcript_uri": "transcript_uri_value", - }, - "dialogflow_source": { - "dialogflow_conversation": "dialogflow_conversation_value", - "audio_uri": "audio_uri_value", - }, - }, - "create_time": {}, - "update_time": {}, - "start_time": {}, - "language_code": "language_code_value", - "agent_id": "agent_id_value", - "labels": {}, - "quality_metadata": { - "customer_satisfaction_rating": 3005, - "wait_duration": {}, - "menu_path": "menu_path_value", - "agent_info": [ - { - "agent_id": "agent_id_value", - "display_name": "display_name_value", - "team": "team_value", - "disposition_code": "disposition_code_value", - } - ], - }, - "transcript": { - "transcript_segments": [ - { - "message_time": {}, - "text": "text_value", - "confidence": 0.1038, - "words": [ - { - "start_offset": {}, - "end_offset": {}, - "word": "word_value", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +def test_get_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest() + + +def test_get_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.GetViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest( + name="name_value", + ) + + +def test_get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + request = {} + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest() + + +@pytest.mark.asyncio +async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_rpc + + request = {} + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=contact_center_insights.GetViewRequest +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +@pytest.mark.asyncio +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) + + +def test_get_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.GetViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = resources.View() + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.GetViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_view( + contact_center_insights.GetViewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_view( + contact_center_insights.GetViewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.ListViewsRequest, + dict, + ], +) +def test_list_views(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.ListViewsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_views_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest() + + +def test_list_views_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_views(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_views in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + request = {} + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_views_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest() + + +@pytest.mark.asyncio +async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_rpc + + request = {} + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_views_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.ListViewsRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.ListViewsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_views_async_from_dict(): + await test_list_views_async(request_type=dict) + + +def test_list_views_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.ListViewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = contact_center_insights.ListViewsResponse() + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_views_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.ListViewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse() + ) + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_views_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_views( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_views_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_views( + contact_center_insights.ListViewsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_views_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_views( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_views_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_views( + contact_center_insights.ListViewsRequest(), + parent="parent_value", + ) + + +def test_list_views_pager(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_views(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.View) for i in results) + + +def test_list_views_pages(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + pages = list(client.list_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_views_async_pager(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_views( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.View) for i in responses) + + +@pytest.mark.asyncio +async def test_list_views_async_pages(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_views(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateViewRequest, + dict, + ], +) +def test_update_view(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +def test_update_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +def test_update_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.UpdateViewRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +def test_update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + request = {} + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +@pytest.mark.asyncio +async def test_update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_rpc + + request = {} + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.UpdateViewRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +@pytest.mark.asyncio +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) + + +def test_update_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.UpdateViewRequest() + + request.view.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = resources.View() + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "view.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.UpdateViewRequest() + + request.view.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "view.name=name_value", + ) in kw["metadata"] + + +def test_update_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_view( + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].view + mock_val = resources.View(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_view( + contact_center_insights.UpdateViewRequest(), + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_view( + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].view + mock_val = resources.View(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_view( + contact_center_insights.UpdateViewRequest(), + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeleteViewRequest, + dict, + ], +) +def test_delete_view(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest() + + +def test_delete_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.DeleteViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
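+ # With the stub mocked out, the test only verifies that the request reaching the transport still carries the explicitly set name field.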
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest( + name="name_value", + ) + + +def test_delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + request = {} + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest() + + +@pytest.mark.asyncio +async def test_delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_rpc + + request = {} + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.DeleteViewRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) + + +def test_delete_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.DeleteViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.DeleteViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
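+ # delete_view also accepts name as a flattened keyword argument; the assertions below confirm it is copied into the request object handed to the stub.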
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_view( + contact_center_insights.DeleteViewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_view( + contact_center_insights.DeleteViewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.CreateConversationRequest, + dict, + ], +) +def test_create_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["conversation"] = { + "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "name_value", + "data_source": { + "gcs_source": { + "audio_uri": "audio_uri_value", + "transcript_uri": "transcript_uri_value", + }, + "dialogflow_source": { + "dialogflow_conversation": "dialogflow_conversation_value", + "audio_uri": "audio_uri_value", + }, + }, + "create_time": {}, + "update_time": {}, + "start_time": {}, + "language_code": "language_code_value", + "agent_id": "agent_id_value", + "labels": {}, + "quality_metadata": { + "customer_satisfaction_rating": 3005, + "wait_duration": {}, + "menu_path": "menu_path_value", + "agent_info": [ + { + "agent_id": "agent_id_value", + "display_name": "display_name_value", + "team": "team_value", + "disposition_code": "disposition_code_value", + } + ], + }, + "metadata_json": "metadata_json_value", + "transcript": { + "transcript_segments": [ + { + "message_time": {}, + "text": "text_value", + "confidence": 0.1038, + "words": [ + { + "start_offset": {}, + "end_offset": {}, + "word": "word_value", + "confidence": 0.1038, + } + ], + "language_code": "language_code_value", + "channel_tag": 1140, + "segment_participant": { + "dialogflow_participant_name": "dialogflow_participant_name_value", + "user_id": "user_id_value", + "dialogflow_participant": "dialogflow_participant_value", + "obfuscated_external_user_id": "obfuscated_external_user_id_value", + "role": 1, + }, + "dialogflow_segment_metadata": { + "smart_reply_allowlist_covered": True + }, + "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, + } + ] + }, + "medium": 1, + "duration": {}, + "turn_count": 1105, + "latest_analysis": { + "name": "name_value", + "request_time": {}, + "create_time": {}, + "analysis_result": { + "call_analysis_metadata": { + "annotations": [ + { + "interruption_data": {}, + "sentiment_data": {}, + "silence_data": {}, + "hold_data": {}, + "entity_mention_data": { + "entity_unique_id": "entity_unique_id_value", + "type_": 1, + "sentiment": {}, + }, + "intent_match_data": { + "intent_unique_id": "intent_unique_id_value" + }, + "phrase_match_data": { + "phrase_matcher": "phrase_matcher_value", + "display_name": "display_name_value", + }, + "issue_match_data": { + "issue_assignment": { + "issue": "issue_value", + "score": 0.54, + "display_name": "display_name_value", + } + }, + "channel_tag": 1140, + "annotation_start_boundary": { + "word_index": 1075, + "transcript_index": 1729, + }, + "annotation_end_boundary": {}, + } + ], + "entities": {}, + "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": {"silence_duration": {}, "silence_percentage": 0.1888}, + "intents": {}, + "phrase_matchers": {}, + "issue_model_result": { + "issue_model": "issue_model_value", + "issues": {}, + }, + }, + "end_time": {}, + }, + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + 
"run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, + }, + }, + "latest_summary": { + "text": "text_value", + "text_sections": {}, + "confidence": 0.1038, + "metadata": {}, + "answer_record": "answer_record_value", + "conversation_model": "conversation_model_value", + }, + "runtime_annotations": [ + { + "article_suggestion": { + "title": "title_value", + "uri": "uri_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "faq_answer": { + "answer": "answer_value", + "confidence_score": 0.1673, + "question": "question_value", + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "smart_reply": { + "reply": "reply_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "smart_compose_suggestion": { + "suggestion": "suggestion_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "dialogflow_interaction": { + "dialogflow_intent_id": "dialogflow_intent_id_value", + "confidence": 0.1038, + }, + "conversation_summarization_suggestion": {}, + "annotation_id": "annotation_id_value", + "create_time": {}, + "start_boundary": {}, + "end_boundary": {}, + "answer_feedback": { + "correctness_level": 1, + "clicked": True, + "displayed": True, + }, + "user_input": { + "query": "query_value", + "generator_name": "generator_name_value", + "query_source": 1, + }, + } + ], + "dialogflow_intents": {}, + "obfuscated_user_id": "obfuscated_user_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_create_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_conversation + ] = mock_rpc + + request = {} + client.create_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_conversation_rest_required_fields( + request_type=contact_center_insights.CreateConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("conversation_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
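+ # path_template.transcode would normally map the request onto the method's http rule; faking it with a fixed uri/method/body keeps the focus on query-parameter handling.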
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("conversationId",)) + & set( + ( + "parent", + "conversation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_create_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_create_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.CreateConversationRequest.pb( + contact_center_insights.CreateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.CreateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.create_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CreateConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
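+ # A 400 status from the mocked session is expected to surface as core_exceptions.BadRequest from create_conversation.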
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_conversation(request) + + +def test_create_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + conversation_id="conversation_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/conversations" + % client.transport._host, + args[1], + ) + + +def test_create_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_conversation( + contact_center_insights.CreateConversationRequest(), + parent="parent_value", + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + conversation_id="conversation_id_value", + ) + + +def test_create_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UploadConversationRequest, + dict, + ], +) +def test_upload_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
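+ # UploadConversation is a long-running operation, so the faked REST response is an Operation message rather than a Conversation resource.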
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.upload_conversation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_upload_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.upload_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.upload_conversation + ] = mock_rpc + + request = {} + client.upload_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.upload_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_upload_conversation_rest_required_fields( + request_type=contact_center_insights.UploadConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upload_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upload_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.upload_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_upload_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.upload_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "conversation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_upload_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_upload_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_upload_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.UploadConversationRequest.pb( + contact_center_insights.UploadConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = contact_center_insights.UploadConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.upload_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upload_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.UploadConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upload_conversation(request) + + +def test_upload_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateConversationRequest, + dict, + ], +) +def test_update_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + request_init["conversation"] = { + "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "projects/sample1/locations/sample2/conversations/sample3", + "data_source": { + "gcs_source": { + "audio_uri": "audio_uri_value", + "transcript_uri": "transcript_uri_value", + }, + "dialogflow_source": { + "dialogflow_conversation": "dialogflow_conversation_value", + "audio_uri": "audio_uri_value", + }, + }, + "create_time": {}, + "update_time": {}, + "start_time": {}, + "language_code": "language_code_value", + "agent_id": "agent_id_value", + "labels": {}, + "quality_metadata": { + "customer_satisfaction_rating": 3005, + "wait_duration": {}, + "menu_path": "menu_path_value", + "agent_info": [ + { + "agent_id": "agent_id_value", + "display_name": "display_name_value", + "team": "team_value", + "disposition_code": "disposition_code_value", + } + ], + }, + "metadata_json": "metadata_json_value", + "transcript": { + "transcript_segments": [ + { + "message_time": {}, + "text": "text_value", + "confidence": 0.1038, + "words": [ + { + "start_offset": {}, + "end_offset": {}, + "word": "word_value", "confidence": 0.1038, } ], @@ -17114,6 +19570,7 @@ def test_create_conversation_rest(request_type): ], "entities": {}, "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": {"silence_duration": {}, "silence_percentage": 0.1888}, "intents": {}, "phrase_matchers": {}, "issue_model_result": { @@ -17192,116 +19649,766 @@ def test_create_conversation_rest(request_type): "clicked": True, "displayed": True, }, + "user_input": { + "query": "query_value", + "generator_name": "generator_name_value", + "query_source": 1, + }, + } + ], + "dialogflow_intents": {}, + "obfuscated_user_id": "obfuscated_user_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_update_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_conversation + ] = mock_rpc + + request = {} + client.update_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_conversation_rest_required_fields( + request_type=contact_center_insights.UpdateConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_update_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_update_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.UpdateConversationRequest.pb( + contact_center_insights.UpdateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.UpdateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.update_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.UpdateConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_conversation(request) + + +def test_update_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{conversation.name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_conversation( + contact_center_insights.UpdateConversationRequest(), + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetConversationRequest, + dict, + ], +) +def test_get_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_conversation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_get_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_conversation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_conversation + ] = mock_rpc + + request = {} + client.get_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_conversation_rest_required_fields( + request_type=contact_center_insights.GetConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } - ], - "dialogflow_intents": {}, - "obfuscated_user_id": "obfuscated_user_id_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + transcode.return_value = transcode_result - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateConversationRequest.meta.fields[ - "conversation" - ] + response_value = Response() + response_value.status_code = 200 - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + response = client.get_conversation(request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversation"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_get_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + unset_fields = transport.get_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_get_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_get_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.GetConversationRequest.pb( + contact_center_insights.GetConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.GetConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.get_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_conversation_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetConversationRequest +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_conversation(request) + + +def test_get_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_conversation( + contact_center_insights.GetConversationRequest(), + name="name_value", + ) + + +def test_get_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.ListConversationsRequest, + dict, + ], +) +def test_list_conversations_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] - else: - del request_init["conversation"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation( - name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", + return_value = contact_center_insights.ListConversationsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.list_conversations(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Conversation) - assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" + assert isinstance(response, pagers.ListConversationsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_conversation_rest_use_cached_wrapped_rpc(): +def test_list_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17316,7 +20423,7 @@ def test_create_conversation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_conversation in client._transport._wrapped_methods + client._transport.list_conversations in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17325,24 +20432,24 @@ def test_create_conversation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_conversation + client._transport.list_conversations ] = mock_rpc request = {} - client.create_conversation(request) + client.list_conversations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_conversation(request) + client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_conversation_rest_required_fields( - request_type=contact_center_insights.CreateConversationRequest, +def test_list_conversations_rest_required_fields( + request_type=contact_center_insights.ListConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -17358,7 +20465,7 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).list_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17367,9 +20474,17 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).list_conversations._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("conversation_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17383,7 +20498,7 @@ def test_create_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = contact_center_insights.ListConversationsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17395,48 +20510,52 @@ def test_create_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.list_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_conversation_rest_unset_required_fields(): +def test_list_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_conversation._get_unset_required_fields({}) + unset_fields = transport.list_conversations._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("conversationId",)) - & set( + set( ( - "parent", - "conversation", + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_conversation_rest_interceptors(null_interceptor): +def test_list_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17449,14 +20568,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_conversation" + transports.ContactCenterInsightsRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateConversationRequest.pb( - contact_center_insights.CreateConversationRequest() + pb_message = contact_center_insights.ListConversationsRequest.pb( + contact_center_insights.ListConversationsRequest() ) transcode.return_value = { "method": "post", @@ -17468,19 +20587,21 @@ def test_create_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() + req.return_value._content = ( + contact_center_insights.ListConversationsResponse.to_json( + contact_center_insights.ListConversationsResponse() + ) ) - request = contact_center_insights.CreateConversationRequest() + request = contact_center_insights.ListConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + 
post.return_value = contact_center_insights.ListConversationsResponse() - client.create_conversation( + client.list_conversations( request, metadata=[ ("key", "val"), @@ -17492,9 +20613,9 @@ def test_create_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_conversation_rest_bad_request( +def test_list_conversations_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CreateConversationRequest, + request_type=contact_center_insights.ListConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17514,10 +20635,10 @@ def test_create_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_conversation(request) + client.list_conversations(request) -def test_create_conversation_rest_flattened(): +def test_list_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17526,7 +20647,7 @@ def test_create_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = contact_center_insights.ListConversationsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -17534,10 +20655,6 @@ def test_create_conversation_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - conversation_id="conversation_id_value", ) mock_args.update(sample_request) @@ -17545,12 +20662,14 @@ def test_create_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_conversation(**mock_args) + client.list_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -17563,7 +20682,7 @@ def test_create_conversation_rest_flattened(): ) -def test_create_conversation_rest_flattened_error(transport: str = "rest"): +def test_list_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17572,58 +20691,112 @@ def test_create_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_conversation( - contact_center_insights.CreateConversationRequest(), + client.list_conversations( + contact_center_insights.ListConversationsRequest(), parent="parent_value", - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - conversation_id="conversation_id_value", ) -def test_create_conversation_rest_error(): +def test_list_conversations_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + resources.Conversation(), + resources.Conversation(), + ], + next_page_token="abc", + ), + contact_center_insights.ListConversationsResponse( + conversations=[], + next_page_token="def", + ), + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + resources.Conversation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + contact_center_insights.ListConversationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_conversations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Conversation) for i in results) + + pages = list(client.list_conversations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UploadConversationRequest, + contact_center_insights.DeleteConversationRequest, dict, ], ) -def test_upload_conversation_rest(request_type): +def test_delete_conversation_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.upload_conversation(request) + response = client.delete_conversation(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert response is None -def test_upload_conversation_rest_use_cached_wrapped_rpc(): +def test_delete_conversation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17638,7 +20811,7 @@ def test_upload_conversation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.upload_conversation in client._transport._wrapped_methods + client._transport.delete_conversation in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17647,33 +20820,29 @@ def test_upload_conversation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.upload_conversation + client._transport.delete_conversation ] = mock_rpc request = {} - client.upload_conversation(request) + client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.upload_conversation(request) + client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_upload_conversation_rest_required_fields( - request_type=contact_center_insights.UploadConversationRequest, +def test_delete_conversation_rest_required_fields( + request_type=contact_center_insights.DeleteConversationRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17684,21 +20853,23 @@ def test_upload_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).upload_conversation._get_unset_required_fields(jsonified_request) + ).delete_conversation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).upload_conversation._get_unset_required_fields(jsonified_request) + ).delete_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("force",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17707,7 +20878,7 @@ def test_upload_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17719,45 +20890,36 @@ def test_upload_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.upload_conversation(request) + response = client.delete_conversation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_upload_conversation_rest_unset_required_fields(): +def test_delete_conversation_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.upload_conversation._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "conversation", - ) - ) - ) + unset_fields = transport.delete_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upload_conversation_rest_interceptors(null_interceptor): +def test_delete_conversation_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17770,16 +20932,11 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_upload_conversation" - ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_upload_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_conversation" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.UploadConversationRequest.pb( - contact_center_insights.UploadConversationRequest() + pb_message = contact_center_insights.DeleteConversationRequest.pb( + contact_center_insights.DeleteConversationRequest() ) transcode.return_value = { "method": "post", @@ -17791,19 +20948,15 @@ def test_upload_conversation_rest_interceptors(null_interceptor): req.return_value = Response() 
req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - request = contact_center_insights.UploadConversationRequest() + request = contact_center_insights.DeleteConversationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - client.upload_conversation( + client.delete_conversation( request, metadata=[ ("key", "val"), @@ -17812,12 +20965,11 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_upload_conversation_rest_bad_request( +def test_delete_conversation_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.UploadConversationRequest, + request_type=contact_center_insights.DeleteConversationRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17825,7 +20977,7 @@ def test_upload_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17837,10 +20989,67 @@ def test_upload_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.upload_conversation(request) + client.delete_conversation(request) -def test_upload_conversation_rest_error(): +def test_delete_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_conversation( + contact_center_insights.DeleteConversationRequest(), + name="name_value", + ) + + +def test_delete_conversation_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17849,11 +21058,11 @@ def test_upload_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateConversationRequest, + contact_center_insights.CreateAnalysisRequest, dict, ], ) -def test_update_conversation_rest(request_type): +def test_create_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17861,209 +21070,88 @@ def test_update_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } - request_init["conversation"] = { - "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, - "expire_time": {"seconds": 751, "nanos": 543}, - "ttl": {"seconds": 751, "nanos": 543}, - "name": "projects/sample1/locations/sample2/conversations/sample3", - "data_source": { - "gcs_source": { - "audio_uri": "audio_uri_value", - "transcript_uri": "transcript_uri_value", - }, - "dialogflow_source": { - "dialogflow_conversation": "dialogflow_conversation_value", - "audio_uri": "audio_uri_value", - }, - }, + request_init["analysis"] = { + "name": "name_value", + "request_time": {"seconds": 751, "nanos": 543}, "create_time": {}, - "update_time": {}, - "start_time": {}, - "language_code": "language_code_value", - "agent_id": "agent_id_value", - "labels": {}, - "quality_metadata": { - "customer_satisfaction_rating": 3005, - "wait_duration": {}, - "menu_path": "menu_path_value", - "agent_info": [ - { - "agent_id": "agent_id_value", - "display_name": "display_name_value", - "team": "team_value", - "disposition_code": "disposition_code_value", - } - ], - }, - "transcript": { - "transcript_segments": [ - { - "message_time": {}, - "text": "text_value", - "confidence": 0.1038, - "words": [ - { - "start_offset": {}, - "end_offset": {}, - "word": "word_value", - "confidence": 0.1038, - } - ], - "language_code": "language_code_value", - "channel_tag": 1140, - "segment_participant": { - "dialogflow_participant_name": "dialogflow_participant_name_value", - "user_id": "user_id_value", - "dialogflow_participant": "dialogflow_participant_value", - "obfuscated_external_user_id": "obfuscated_external_user_id_value", - "role": 1, - }, - "dialogflow_segment_metadata": { - "smart_reply_allowlist_covered": True - }, - "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, - } - ] - }, - "medium": 1, - "duration": {}, - "turn_count": 1105, - "latest_analysis": { - "name": "name_value", - "request_time": {}, - "create_time": {}, - "analysis_result": { - "call_analysis_metadata": { - "annotations": [ - { - "interruption_data": {}, - "sentiment_data": {}, - "silence_data": {}, - "hold_data": {}, - "entity_mention_data": { - "entity_unique_id": "entity_unique_id_value", - "type_": 1, - "sentiment": {}, - }, - "intent_match_data": { - "intent_unique_id": "intent_unique_id_value" - }, - "phrase_match_data": { - "phrase_matcher": "phrase_matcher_value", - "display_name": "display_name_value", - }, - "issue_match_data": { - "issue_assignment": { - "issue": "issue_value", - "score": 0.54, - 
"display_name": "display_name_value", - } - }, - "channel_tag": 1140, - "annotation_start_boundary": { - "word_index": 1075, - "transcript_index": 1729, - }, - "annotation_end_boundary": {}, - } - ], - "entities": {}, - "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], - "intents": {}, - "phrase_matchers": {}, - "issue_model_result": { - "issue_model": "issue_model_value", - "issues": {}, - }, + "analysis_result": { + "call_analysis_metadata": { + "annotations": [ + { + "interruption_data": {}, + "sentiment_data": { + "magnitude": 0.9580000000000001, + "score": 0.54, + }, + "silence_data": {}, + "hold_data": {}, + "entity_mention_data": { + "entity_unique_id": "entity_unique_id_value", + "type_": 1, + "sentiment": {}, + }, + "intent_match_data": { + "intent_unique_id": "intent_unique_id_value" + }, + "phrase_match_data": { + "phrase_matcher": "phrase_matcher_value", + "display_name": "display_name_value", + }, + "issue_match_data": { + "issue_assignment": { + "issue": "issue_value", + "score": 0.54, + "display_name": "display_name_value", + } + }, + "channel_tag": 1140, + "annotation_start_boundary": { + "word_index": 1075, + "transcript_index": 1729, + }, + "annotation_end_boundary": {}, + } + ], + "entities": {}, + "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": { + "silence_duration": {"seconds": 751, "nanos": 543}, + "silence_percentage": 0.1888, }, - "end_time": {}, - }, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, + "intents": {}, + "phrase_matchers": {}, + "issue_model_result": { + "issue_model": "issue_model_value", + "issues": {}, }, }, + "end_time": {}, }, - "latest_summary": { - "text": "text_value", - "text_sections": {}, - "confidence": 0.1038, - "metadata": {}, - "answer_record": "answer_record_value", - "conversation_model": "conversation_model_value", + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, }, - "runtime_annotations": [ - { - "article_suggestion": { - "title": "title_value", - "uri": "uri_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "faq_answer": { - "answer": "answer_value", - "confidence_score": 0.1673, - "question": "question_value", - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "smart_reply": { - "reply": "reply_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "smart_compose_suggestion": { - "suggestion": 
"suggestion_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "dialogflow_interaction": { - "dialogflow_intent_id": "dialogflow_intent_id_value", - "confidence": 0.1038, - }, - "conversation_summarization_suggestion": {}, - "annotation_id": "annotation_id_value", - "create_time": {}, - "start_boundary": {}, - "end_boundary": {}, - "answer_feedback": { - "correctness_level": 1, - "clicked": True, - "displayed": True, - }, - } - ], - "dialogflow_intents": {}, - "obfuscated_user_id": "obfuscated_user_id_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateConversationRequest.meta.fields[ - "conversation" - ] + test_field = contact_center_insights.CreateAnalysisRequest.meta.fields["analysis"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18091,7 +21179,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversation"].items(): # pragma: NO COVER + for field, value in request_init["analysis"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18121,46 +21209,31 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] + for i in range(0, len(request_init["analysis"][field])): + del request_init["analysis"][field][i][subfield] else: - del request_init["conversation"][field][subfield] + del request_init["analysis"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation( - name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.create_analysis(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Conversation) - assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" + assert response.operation.name == "operations/spam" -def test_update_conversation_rest_use_cached_wrapped_rpc(): +def test_create_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18174,38 +21247,39 @@ def test_update_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_conversation in client._transport._wrapped_methods - ) + assert client._transport.create_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_analysis] = mock_rpc request = {} - client.update_conversation(request) + client.create_analysis(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_conversation(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_conversation_rest_required_fields( - request_type=contact_center_insights.UpdateConversationRequest, +def test_create_analysis_rest_required_fields( + request_type=contact_center_insights.CreateAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18216,19 +21290,21 @@ def test_update_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) + ).create_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).create_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18237,7 +21313,7 @@ def test_update_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18249,7 +21325,7 @@ def test_update_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18257,32 +21333,37 @@ def test_update_conversation_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.create_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_conversation_rest_unset_required_fields(): +def test_create_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + unset_fields = transport.create_analysis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "analysis", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_conversation_rest_interceptors(null_interceptor): +def test_create_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18295,14 +21376,16 @@ def test_update_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_conversation" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_create_analysis" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateConversationRequest.pb( - contact_center_insights.UpdateConversationRequest() + pb_message = contact_center_insights.CreateAnalysisRequest.pb( + contact_center_insights.CreateAnalysisRequest() ) transcode.return_value = { 
"method": "post", @@ -18314,19 +21397,19 @@ def test_update_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = contact_center_insights.UpdateConversationRequest() + request = contact_center_insights.CreateAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + post.return_value = operations_pb2.Operation() - client.update_conversation( + client.create_analysis( request, metadata=[ ("key", "val"), @@ -18338,9 +21421,8 @@ def test_update_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_conversation_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UpdateConversationRequest, +def test_create_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.CreateAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18349,9 +21431,7 @@ def test_update_conversation_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } request = request_type(**request_init) @@ -18364,10 +21444,10 @@ def test_update_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_conversation(request) + client.create_analysis(request) -def test_update_conversation_rest_flattened(): +def test_create_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18376,47 +21456,41 @@ def test_update_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } # get truthy value for each flattened field mock_args = dict( - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + analysis=resources.Analysis(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_conversation(**mock_args) + client.create_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{conversation.name=projects/*/locations/*/conversations/*}" + "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" % client.transport._host, args[1], ) -def test_update_conversation_rest_flattened_error(transport: str = "rest"): +def test_create_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18425,16 +21499,14 @@ def test_update_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_conversation( - contact_center_insights.UpdateConversationRequest(), - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_analysis( + contact_center_insights.CreateAnalysisRequest(), + parent="parent_value", + analysis=resources.Analysis(name="name_value"), ) -def test_update_conversation_rest_error(): +def test_create_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18443,54 +21515,46 @@ def test_update_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetConversationRequest, + contact_center_insights.GetAnalysisRequest, dict, ], ) -def test_get_conversation_rest(request_type): +def test_get_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation( + return_value = resources.Analysis( name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_analysis(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Conversation) + assert isinstance(response, resources.Analysis) assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" -def test_get_conversation_rest_use_cached_wrapped_rpc(): +def test_get_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18504,32 +21568,30 @@ def test_get_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_conversation in client._transport._wrapped_methods + assert client._transport.get_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_analysis] = mock_rpc request = {} - client.get_conversation(request) + client.get_analysis(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_conversation(request) + client.get_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_conversation_rest_required_fields( - request_type=contact_center_insights.GetConversationRequest, +def test_get_analysis_rest_required_fields( + request_type=contact_center_insights.GetAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -18545,7 +21607,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) + ).get_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18554,9 +21616,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("view",)) + ).get_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18570,7 +21630,7 @@ def test_get_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = resources.Analysis() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18591,30 +21651,30 @@ def test_get_conversation_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_conversation_rest_unset_required_fields(): +def test_get_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.get_analysis._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_conversation_rest_interceptors(null_interceptor): +def test_get_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18627,14 +21687,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_conversation" + transports.ContactCenterInsightsRestInterceptor, "post_get_analysis" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetConversationRequest.pb( - contact_center_insights.GetConversationRequest() + pb_message = contact_center_insights.GetAnalysisRequest.pb( + contact_center_insights.GetAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -18646,19 +21706,17 @@ def test_get_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() - ) + req.return_value._content = resources.Analysis.to_json(resources.Analysis()) - request = contact_center_insights.GetConversationRequest() + request = contact_center_insights.GetAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + 
post.return_value = resources.Analysis() - client.get_conversation( + client.get_analysis( request, metadata=[ ("key", "val"), @@ -18670,8 +21728,8 @@ def test_get_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_conversation_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetConversationRequest +def test_get_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18679,7 +21737,9 @@ def test_get_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18691,10 +21751,10 @@ def test_get_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_conversation(request) + client.get_analysis(request) -def test_get_conversation_rest_flattened(): +def test_get_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18703,11 +21763,11 @@ def test_get_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = resources.Analysis() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3" + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" } # get truthy value for each flattened field @@ -18720,25 +21780,25 @@ def test_get_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_conversation(**mock_args) + client.get_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" % client.transport._host, args[1], ) -def test_get_conversation_rest_flattened_error(transport: str = "rest"): +def test_get_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18747,13 +21807,13 @@ def test_get_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_conversation( - contact_center_insights.GetConversationRequest(), + client.get_analysis( + contact_center_insights.GetAnalysisRequest(), name="name_value", ) -def test_get_conversation_rest_error(): +def test_get_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18762,24 +21822,26 @@ def test_get_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListConversationsRequest, + contact_center_insights.ListAnalysesRequest, dict, ], ) -def test_list_conversations_rest(request_type): +def test_list_analyses_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListConversationsResponse( + return_value = contact_center_insights.ListAnalysesResponse( next_page_token="next_page_token_value", ) @@ -18787,21 +21849,19 @@ def test_list_conversations_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_analyses(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConversationsPager) + assert isinstance(response, pagers.ListAnalysesPager) assert response.next_page_token == "next_page_token_value" -def test_list_conversations_rest_use_cached_wrapped_rpc(): +def test_list_analyses_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18815,34 +21875,30 @@ def test_list_conversations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_conversations in client._transport._wrapped_methods - ) + assert client._transport.list_analyses in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_conversations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_analyses] = mock_rpc request = {} - client.list_conversations(request) + client.list_analyses(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_conversations(request) + client.list_analyses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_conversations_rest_required_fields( - request_type=contact_center_insights.ListConversationsRequest, +def test_list_analyses_rest_required_fields( + request_type=contact_center_insights.ListAnalysesRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -18858,7 +21914,7 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_analyses._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18867,14 +21923,13 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_analyses._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( "filter", "page_size", "page_token", - "view", ) ) jsonified_request.update(unset_fields) @@ -18890,7 +21945,7 @@ def test_list_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListConversationsResponse() + return_value = contact_center_insights.ListAnalysesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18911,34 +21966,31 @@ def test_list_conversations_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_analyses(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_conversations_rest_unset_required_fields(): +def test_list_analyses_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_conversations._get_unset_required_fields({}) + unset_fields = transport.list_analyses._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "filter", "pageSize", "pageToken", - "view", ) ) & set(("parent",)) @@ -18946,7 +21998,7 @@ def test_list_conversations_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_conversations_rest_interceptors(null_interceptor): +def test_list_analyses_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18959,14 +22011,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as 
req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_conversations" + transports.ContactCenterInsightsRestInterceptor, "post_list_analyses" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_list_analyses" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListConversationsRequest.pb( - contact_center_insights.ListConversationsRequest() + pb_message = contact_center_insights.ListAnalysesRequest.pb( + contact_center_insights.ListAnalysesRequest() ) transcode.return_value = { "method": "post", @@ -18979,20 +22031,20 @@ def test_list_conversations_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - contact_center_insights.ListConversationsResponse.to_json( - contact_center_insights.ListConversationsResponse() + contact_center_insights.ListAnalysesResponse.to_json( + contact_center_insights.ListAnalysesResponse() ) ) - request = contact_center_insights.ListConversationsRequest() + request = contact_center_insights.ListAnalysesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListConversationsResponse() + post.return_value = contact_center_insights.ListAnalysesResponse() - client.list_conversations( + client.list_analyses( request, metadata=[ ("key", "val"), @@ -19004,9 +22056,8 @@ def test_list_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_conversations_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.ListConversationsRequest, +def test_list_analyses_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.ListAnalysesRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19014,7 +22065,9 @@ def test_list_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19026,10 +22079,10 @@ def test_list_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_conversations(request) + client.list_analyses(request) -def test_list_conversations_rest_flattened(): +def test_list_analyses_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19038,10 +22091,12 @@ def test_list_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListConversationsResponse() + return_value = contact_center_insights.ListAnalysesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -19053,27 +22108,25 @@ def test_list_conversations_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_conversations(**mock_args) + client.list_analyses(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations" + "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" % client.transport._host, args[1], ) -def test_list_conversations_rest_flattened_error(transport: str = "rest"): +def test_list_analyses_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19082,13 +22135,13 @@ def test_list_conversations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_conversations( - contact_center_insights.ListConversationsRequest(), + client.list_analyses( + contact_center_insights.ListAnalysesRequest(), parent="parent_value", ) -def test_list_conversations_rest_pager(transport: str = "rest"): +def test_list_analyses_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19100,28 +22153,28 @@ def test_list_conversations_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), - resources.Conversation(), - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), + resources.Analysis(), + resources.Analysis(), ], next_page_token="abc", ), - contact_center_insights.ListConversationsResponse( - conversations=[], + contact_center_insights.ListAnalysesResponse( + analyses=[], next_page_token="def", ), - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), ], next_page_token="ghi", ), - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), + resources.Analysis(), ], ), ) @@ -19130,8 +22183,7 @@ def test_list_conversations_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - contact_center_insights.ListConversationsResponse.to_json(x) - for x in response + contact_center_insights.ListAnalysesResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -19139,15 +22191,17 @@ def test_list_conversations_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } - pager = client.list_conversations(request=sample_request) + pager = client.list_analyses(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.Conversation) for i in results) + assert all(isinstance(i, resources.Analysis) for i in results) - pages = list(client.list_conversations(request=sample_request).pages) + pages = list(client.list_analyses(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -19155,18 +22209,20 @@ def test_list_conversations_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteConversationRequest, + contact_center_insights.DeleteAnalysisRequest, dict, ], ) -def test_delete_conversation_rest(request_type): +def test_delete_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": 
"projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19181,13 +22237,13 @@ def test_delete_conversation_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_analysis(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_conversation_rest_use_cached_wrapped_rpc(): +def test_delete_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19201,34 +22257,30 @@ def test_delete_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_conversation in client._transport._wrapped_methods - ) + assert client._transport.delete_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_analysis] = mock_rpc request = {} - client.delete_conversation(request) + client.delete_analysis(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_conversation(request) + client.delete_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_conversation_rest_required_fields( - request_type=contact_center_insights.DeleteConversationRequest, +def test_delete_analysis_rest_required_fields( + request_type=contact_center_insights.DeleteAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -19244,7 +22296,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) + ).delete_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19253,9 +22305,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force",)) + ).delete_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19293,24 +22343,24 @@ def test_delete_conversation_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_conversation_rest_unset_required_fields(): +def test_delete_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + unset_fields = transport.delete_analysis._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_conversation_rest_interceptors(null_interceptor): +def test_delete_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19323,11 +22373,11 @@ def test_delete_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_analysis" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteConversationRequest.pb( - contact_center_insights.DeleteConversationRequest() + pb_message = contact_center_insights.DeleteAnalysisRequest.pb( + contact_center_insights.DeleteAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -19340,14 +22390,14 @@ def test_delete_conversation_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - request = contact_center_insights.DeleteConversationRequest() + request = contact_center_insights.DeleteAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_conversation( + client.delete_analysis( request, metadata=[ ("key", "val"), @@ -19358,9 +22408,8 @@ def test_delete_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() -def test_delete_conversation_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeleteConversationRequest, +def test_delete_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.DeleteAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19368,7 +22417,9 @@ def test_delete_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19380,10 +22431,10 @@ def test_delete_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_conversation(request) + client.delete_analysis(request) -def test_delete_conversation_rest_flattened(): +def test_delete_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19396,7 +22447,7 @@ def test_delete_conversation_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3" + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" } # get truthy value for each flattened field @@ -19412,20 +22463,20 @@ def test_delete_conversation_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_conversation(**mock_args) + client.delete_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" % client.transport._host, args[1], ) -def test_delete_conversation_rest_flattened_error(transport: str = "rest"): +def test_delete_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19434,13 +22485,13 @@ def test_delete_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_conversation( - contact_center_insights.DeleteConversationRequest(), + client.delete_analysis( + contact_center_insights.DeleteAnalysisRequest(), name="name_value", ) -def test_delete_conversation_rest_error(): +def test_delete_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19449,157 +22500,18 @@ def test_delete_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateAnalysisRequest, + contact_center_insights.BulkAnalyzeConversationsRequest, dict, ], ) -def test_create_analysis_rest(request_type): +def test_bulk_analyze_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } - request_init["analysis"] = { - "name": "name_value", - "request_time": {"seconds": 751, "nanos": 543}, - "create_time": {}, - "analysis_result": { - "call_analysis_metadata": { - "annotations": [ - { - "interruption_data": {}, - "sentiment_data": { - "magnitude": 0.9580000000000001, - "score": 0.54, - }, - "silence_data": {}, - "hold_data": {}, - "entity_mention_data": { - "entity_unique_id": "entity_unique_id_value", - "type_": 1, - "sentiment": {}, - }, - "intent_match_data": { - "intent_unique_id": "intent_unique_id_value" - }, - "phrase_match_data": { - "phrase_matcher": "phrase_matcher_value", - "display_name": "display_name_value", - }, - "issue_match_data": { - "issue_assignment": { - "issue": "issue_value", - "score": 0.54, - "display_name": "display_name_value", - } - }, - "channel_tag": 1140, - "annotation_start_boundary": { - "word_index": 1075, - "transcript_index": 1729, - }, - "annotation_end_boundary": {}, - } - ], - "entities": {}, - "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], - "intents": {}, - "phrase_matchers": {}, - "issue_model_result": { - "issue_model": "issue_model_value", - "issues": {}, - }, - }, - "end_time": {}, - }, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, - }, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateAnalysisRequest.meta.fields["analysis"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["analysis"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["analysis"][field])): - del request_init["analysis"][field][i][subfield] - else: - del request_init["analysis"][field][subfield] + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19614,13 +22526,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_analysis(request) + response = client.bulk_analyze_conversations(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_analysis_rest_use_cached_wrapped_rpc(): +def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19634,17 +22546,22 @@ def test_create_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_analysis in client._transport._wrapped_methods + assert ( + client._transport.bulk_analyze_conversations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.bulk_analyze_conversations + ] = mock_rpc request = {} - client.create_analysis(request) + client.bulk_analyze_conversations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -19653,20 +22570,22 @@ def test_create_analysis_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_analysis(request) + client.bulk_analyze_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_analysis_rest_required_fields( - request_type=contact_center_insights.CreateAnalysisRequest, +def test_bulk_analyze_conversations_rest_required_fields( + request_type=contact_center_insights.BulkAnalyzeConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" + request_init["analysis_percentage"] = 0.0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19677,21 +22596,27 @@ def test_create_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_analysis._get_unset_required_fields(jsonified_request) + ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + jsonified_request["analysisPercentage"] = 0.20170000000000002 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_analysis._get_unset_required_fields(jsonified_request) + ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + assert "analysisPercentage" in jsonified_request + assert jsonified_request["analysisPercentage"] == 0.20170000000000002 client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19725,32 +22650,33 @@ def test_create_analysis_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_analysis(request) + response = client.bulk_analyze_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_analysis_rest_unset_required_fields(): +def test_bulk_analyze_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_analysis._get_unset_required_fields({}) + unset_fields = transport.bulk_analyze_conversations._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "analysis", + "filter", + "analysisPercentage", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_analysis_rest_interceptors(null_interceptor): +def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19765,14 +22691,16 @@ def test_create_analysis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_analysis" + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_analyze_conversations", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis" + transports.ContactCenterInsightsRestInterceptor, + "pre_bulk_analyze_conversations", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateAnalysisRequest.pb( - contact_center_insights.CreateAnalysisRequest() + pb_message = contact_center_insights.BulkAnalyzeConversationsRequest.pb( + contact_center_insights.BulkAnalyzeConversationsRequest() ) transcode.return_value = { "method": "post", @@ -19788,7 +22716,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.CreateAnalysisRequest() + request = contact_center_insights.BulkAnalyzeConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -19796,7 +22724,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_analysis( + client.bulk_analyze_conversations( request, metadata=[ ("key", "val"), @@ -19808,8 +22736,9 @@ def test_create_analysis_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.CreateAnalysisRequest +def test_bulk_analyze_conversations_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.BulkAnalyzeConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19817,9 +22746,7 @@ def test_create_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19831,10 +22758,10 @@ def test_create_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_analysis(request) + client.bulk_analyze_conversations(request) -def test_create_analysis_rest_flattened(): +def test_bulk_analyze_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19846,14 +22773,13 @@ def test_create_analysis_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - analysis=resources.Analysis(name="name_value"), + filter="filter_value", + analysis_percentage=0.20170000000000002, ) mock_args.update(sample_request) @@ -19864,20 +22790,20 @@ def test_create_analysis_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_analysis(**mock_args) + client.bulk_analyze_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" + "%s/v1/{parent=projects/*/locations/*}/conversations:bulkAnalyze" % client.transport._host, args[1], ) -def test_create_analysis_rest_flattened_error(transport: str = "rest"): +def test_bulk_analyze_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19886,14 +22812,15 @@ def test_create_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_analysis( - contact_center_insights.CreateAnalysisRequest(), + client.bulk_analyze_conversations( + contact_center_insights.BulkAnalyzeConversationsRequest(), parent="parent_value", - analysis=resources.Analysis(name="name_value"), + filter="filter_value", + analysis_percentage=0.20170000000000002, ) -def test_create_analysis_rest_error(): +def test_bulk_analyze_conversations_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19902,46 +22829,39 @@ def test_create_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetAnalysisRequest, + contact_center_insights.BulkDeleteConversationsRequest, dict, ], ) -def test_get_analysis_rest(request_type): +def test_bulk_delete_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Analysis( - name="name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_analysis(request) + response = client.bulk_delete_conversations(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Analysis) - assert response.name == "name_value" + assert response.operation.name == "operations/spam" -def test_get_analysis_rest_use_cached_wrapped_rpc(): +def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19955,35 +22875,44 @@ def test_get_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_analysis in client._transport._wrapped_methods + assert ( + client._transport.bulk_delete_conversations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.bulk_delete_conversations + ] = mock_rpc request = {} - client.get_analysis(request) + client.bulk_delete_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_analysis(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_delete_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_analysis_rest_required_fields( - request_type=contact_center_insights.GetAnalysisRequest, +def test_bulk_delete_conversations_rest_required_fields( + request_type=contact_center_insights.BulkDeleteConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19994,21 +22923,21 @@ def test_get_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_analysis._get_unset_required_fields(jsonified_request) + ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_analysis._get_unset_required_fields(jsonified_request) + ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20017,7 +22946,7 @@ def test_get_analysis_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Analysis() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20029,39 +22958,37 @@ def test_get_analysis_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_analysis(request) + response = client.bulk_delete_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_analysis_rest_unset_required_fields(): +def test_bulk_delete_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_analysis._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.bulk_delete_conversations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_analysis_rest_interceptors(null_interceptor): +def test_bulk_delete_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20074,14 +23001,17 @@ def test_get_analysis_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_analysis" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_delete_conversations", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis" + transports.ContactCenterInsightsRestInterceptor, "pre_bulk_delete_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetAnalysisRequest.pb( - contact_center_insights.GetAnalysisRequest() + pb_message = contact_center_insights.BulkDeleteConversationsRequest.pb( + contact_center_insights.BulkDeleteConversationsRequest() ) transcode.return_value = { "method": "post", @@ -20093,17 +23023,19 @@ def test_get_analysis_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Analysis.to_json(resources.Analysis()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.GetAnalysisRequest() + request = contact_center_insights.BulkDeleteConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Analysis() + post.return_value = operations_pb2.Operation() - client.get_analysis( + client.bulk_delete_conversations( request, metadata=[ ("key", "val"), @@ -20115,8 +23047,9 @@ def 
test_get_analysis_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetAnalysisRequest +def test_bulk_delete_conversations_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.BulkDeleteConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20124,9 +23057,7 @@ def test_get_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20138,10 +23069,10 @@ def test_get_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_analysis(request) + client.bulk_delete_conversations(request) -def test_get_analysis_rest_flattened(): +def test_bulk_delete_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20150,42 +23081,39 @@ def test_get_analysis_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Analysis() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + filter="filter_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_analysis(**mock_args) + client.bulk_delete_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" + "%s/v1/{parent=projects/*/locations/*}/conversations:bulkDelete" % client.transport._host, args[1], ) -def test_get_analysis_rest_flattened_error(transport: str = "rest"): +def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20194,13 +23122,14 @@ def test_get_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_analysis( - contact_center_insights.GetAnalysisRequest(), - name="name_value", + client.bulk_delete_conversations( + contact_center_insights.BulkDeleteConversationsRequest(), + parent="parent_value", + filter="filter_value", ) -def test_get_analysis_rest_error(): +def test_bulk_delete_conversations_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20209,46 +23138,39 @@ def test_get_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListAnalysesRequest, + contact_center_insights.IngestConversationsRequest, dict, ], ) -def test_list_analyses_rest(request_type): +def test_ingest_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListAnalysesResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_analyses(request) + response = client.ingest_conversations(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAnalysesPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_analyses_rest_use_cached_wrapped_rpc(): +def test_ingest_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20262,30 +23184,38 @@ def test_list_analyses_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_analyses in client._transport._wrapped_methods + assert ( + client._transport.ingest_conversations in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_analyses] = mock_rpc + client._transport._wrapped_methods[ + client._transport.ingest_conversations + ] = mock_rpc request = {} - client.list_analyses(request) + client.ingest_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_analyses(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.ingest_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_analyses_rest_required_fields( - request_type=contact_center_insights.ListAnalysesRequest, +def test_ingest_conversations_rest_required_fields( + request_type=contact_center_insights.IngestConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -20301,7 +23231,7 @@ def test_list_analyses_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_analyses._get_unset_required_fields(jsonified_request) + ).ingest_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -20310,15 +23240,7 @@ def test_list_analyses_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_analyses._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).ingest_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -20332,7 +23254,7 @@ def test_list_analyses_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListAnalysesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20344,48 +23266,37 @@ def test_list_analyses_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_analyses(request) + response = client.ingest_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_analyses_rest_unset_required_fields(): +def test_ingest_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_analyses._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.ingest_conversations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_analyses_rest_interceptors(null_interceptor): +def test_ingest_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20398,14 +23309,16 @@ def test_list_analyses_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_analyses" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_ingest_conversations" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_analyses" + transports.ContactCenterInsightsRestInterceptor, "pre_ingest_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListAnalysesRequest.pb( - contact_center_insights.ListAnalysesRequest() + pb_message = contact_center_insights.IngestConversationsRequest.pb( + contact_center_insights.IngestConversationsRequest() ) transcode.return_value = { "method": "post", @@ -20417,21 +23330,19 @@ def test_list_analyses_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListAnalysesResponse.to_json( - contact_center_insights.ListAnalysesResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = contact_center_insights.ListAnalysesRequest() + request = contact_center_insights.IngestConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListAnalysesResponse() + post.return_value = operations_pb2.Operation() - 
client.list_analyses( + client.ingest_conversations( request, metadata=[ ("key", "val"), @@ -20443,8 +23354,9 @@ def test_list_analyses_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_analyses_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListAnalysesRequest +def test_ingest_conversations_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.IngestConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20452,9 +23364,7 @@ def test_list_analyses_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20466,10 +23376,10 @@ def test_list_analyses_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_analyses(request) + client.ingest_conversations(request) -def test_list_analyses_rest_flattened(): +def test_ingest_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20478,12 +23388,10 @@ def test_list_analyses_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListAnalysesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -20494,26 +23402,24 @@ def test_list_analyses_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_analyses(**mock_args) + client.ingest_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" + "%s/v1/{parent=projects/*/locations/*}/conversations:ingest" % client.transport._host, args[1], ) -def test_list_analyses_rest_flattened_error(transport: str = "rest"): +def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20522,115 +23428,54 @@ def test_list_analyses_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_analyses( - contact_center_insights.ListAnalysesRequest(), + client.ingest_conversations( + contact_center_insights.IngestConversationsRequest(), parent="parent_value", ) -def test_list_analyses_rest_pager(transport: str = "rest"): +def test_ingest_conversations_rest_error(): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - resources.Analysis(), - resources.Analysis(), - ], - next_page_token="abc", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[], - next_page_token="def", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - resources.Analysis(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - contact_center_insights.ListAnalysesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } - - pager = client.list_analyses(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Analysis) for i in results) - - pages = list(client.list_analyses(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteAnalysisRequest, + contact_center_insights.ExportInsightsDataRequest, dict, ], ) -def test_delete_analysis_rest(request_type): +def test_export_insights_data_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_analysis(request) + response = client.export_insights_data(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_analysis_rest_use_cached_wrapped_rpc(): +def test_export_insights_data_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20644,35 +23489,43 @@ def test_delete_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_analysis in client._transport._wrapped_methods + assert ( + client._transport.export_insights_data in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_insights_data + ] = mock_rpc request = {} - client.delete_analysis(request) + client.export_insights_data(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_analysis(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_insights_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_analysis_rest_required_fields( - request_type=contact_center_insights.DeleteAnalysisRequest, +def test_export_insights_data_rest_required_fields( + request_type=contact_center_insights.ExportInsightsDataRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20683,21 +23536,21 @@ def test_delete_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analysis._get_unset_required_fields(jsonified_request) + ).export_insights_data._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analysis._get_unset_required_fields(jsonified_request) + ).export_insights_data._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert 
jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20706,7 +23559,7 @@ def test_delete_analysis_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20718,36 +23571,37 @@ def test_delete_analysis_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_analysis(request) + response = client.export_insights_data(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_analysis_rest_unset_required_fields(): +def test_export_insights_data_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_analysis._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.export_insights_data._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_analysis_rest_interceptors(null_interceptor): +def test_export_insights_data_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20760,11 +23614,16 @@ def test_delete_analysis_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_analysis" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_export_insights_data" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_export_insights_data" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteAnalysisRequest.pb( - contact_center_insights.DeleteAnalysisRequest() + post.assert_not_called() + pb_message = contact_center_insights.ExportInsightsDataRequest.pb( + contact_center_insights.ExportInsightsDataRequest() ) transcode.return_value = { "method": "post", @@ -20776,15 +23635,19 @@ def test_delete_analysis_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.DeleteAnalysisRequest() + request = contact_center_insights.ExportInsightsDataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + 
post.return_value = operations_pb2.Operation() - client.delete_analysis( + client.export_insights_data( request, metadata=[ ("key", "val"), @@ -20793,10 +23656,12 @@ def test_delete_analysis_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.DeleteAnalysisRequest +def test_export_insights_data_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.ExportInsightsDataRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20804,9 +23669,7 @@ def test_delete_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20818,10 +23681,10 @@ def test_delete_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_analysis(request) + client.export_insights_data(request) -def test_delete_analysis_rest_flattened(): +def test_export_insights_data_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20830,40 +23693,38 @@ def test_delete_analysis_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_analysis(**mock_args) + client.export_insights_data(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" + "%s/v1/{parent=projects/*/locations/*}/insightsdata:export" % client.transport._host, args[1], ) -def test_delete_analysis_rest_flattened_error(transport: str = "rest"): +def test_export_insights_data_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20872,13 +23733,13 @@ def test_delete_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_analysis( - contact_center_insights.DeleteAnalysisRequest(), - name="name_value", + client.export_insights_data( + contact_center_insights.ExportInsightsDataRequest(), + parent="parent_value", ) -def test_delete_analysis_rest_error(): +def test_export_insights_data_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20887,11 +23748,11 @@ def test_delete_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.BulkAnalyzeConversationsRequest, + contact_center_insights.CreateIssueModelRequest, dict, ], ) -def test_bulk_analyze_conversations_rest(request_type): +def test_create_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20899,6 +23760,95 @@ def test_bulk_analyze_conversations_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["issue_model"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "issue_count": 1201, + "state": 1, + "input_data_config": { + "medium": 1, + "training_conversations_count": 3025, + "filter": "filter_value", + }, + "training_stats": { + "analyzed_conversations_count": 3021, + "unclassified_conversations_count": 3439, + "issue_stats": {}, + }, + "model_type": 1, + "language_code": "language_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreateIssueModelRequest.meta.fields[ + "issue_model" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue_model"][field])): + del request_init["issue_model"][field][i][subfield] + else: + del request_init["issue_model"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20913,13 +23863,13 @@ def test_bulk_analyze_conversations_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_analyze_conversations(request) + response = client.create_issue_model(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): +def test_create_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20934,8 +23884,7 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.bulk_analyze_conversations - in client._transport._wrapped_methods + client._transport.create_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -20944,11 +23893,11 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.bulk_analyze_conversations + client._transport.create_issue_model ] = mock_rpc request = {} - client.bulk_analyze_conversations(request) + client.create_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -20957,22 +23906,20 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.bulk_analyze_conversations(request) + client.create_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_bulk_analyze_conversations_rest_required_fields( - request_type=contact_center_insights.BulkAnalyzeConversationsRequest, +def test_create_issue_model_rest_required_fields( + request_type=contact_center_insights.CreateIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" - request_init["analysis_percentage"] = 0.0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20983,27 +23930,21 @@ def test_bulk_analyze_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) + ).create_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" - jsonified_request["analysisPercentage"] = 0.20170000000000002 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) + ).create_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" - assert "analysisPercentage" in jsonified_request - assert jsonified_request["analysisPercentage"] == 0.20170000000000002 client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21037,33 +23978,32 @@ def test_bulk_analyze_conversations_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_analyze_conversations(request) + response = client.create_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_bulk_analyze_conversations_rest_unset_required_fields(): +def test_create_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.bulk_analyze_conversations._get_unset_required_fields({}) + unset_fields = transport.create_issue_model._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "filter", - "analysisPercentage", + "issueModel", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_bulk_analyze_conversations_rest_interceptors(null_interceptor): +def test_create_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21078,16 +24018,14 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_bulk_analyze_conversations", + transports.ContactCenterInsightsRestInterceptor, "post_create_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "pre_bulk_analyze_conversations", + transports.ContactCenterInsightsRestInterceptor, "pre_create_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.BulkAnalyzeConversationsRequest.pb( - contact_center_insights.BulkAnalyzeConversationsRequest() + pb_message = contact_center_insights.CreateIssueModelRequest.pb( + contact_center_insights.CreateIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21103,7 +24041,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.BulkAnalyzeConversationsRequest() + request = contact_center_insights.CreateIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21111,7 +24049,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.bulk_analyze_conversations( + client.create_issue_model( request, metadata=[ ("key", "val"), @@ -21123,9 +24061,9 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_bulk_analyze_conversations_rest_bad_request( +def test_create_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.BulkAnalyzeConversationsRequest, + request_type=contact_center_insights.CreateIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21145,10 +24083,10 @@ def test_bulk_analyze_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.bulk_analyze_conversations(request) + client.create_issue_model(request) -def test_bulk_analyze_conversations_rest_flattened(): +def test_create_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21165,8 +24103,7 @@ def test_bulk_analyze_conversations_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - filter="filter_value", - analysis_percentage=0.20170000000000002, + issue_model=resources.IssueModel(name="name_value"), ) mock_args.update(sample_request) @@ -21177,78 +24114,185 @@ def test_bulk_analyze_conversations_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.bulk_analyze_conversations(**mock_args) + client.create_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:bulkAnalyze" + "%s/v1/{parent=projects/*/locations/*}/issueModels" % client.transport._host, args[1], ) -def test_bulk_analyze_conversations_rest_flattened_error(transport: str = "rest"): +def test_create_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.bulk_analyze_conversations( - contact_center_insights.BulkAnalyzeConversationsRequest(), - parent="parent_value", - filter="filter_value", - analysis_percentage=0.20170000000000002, - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_issue_model( + contact_center_insights.CreateIssueModelRequest(), + parent="parent_value", + issue_model=resources.IssueModel(name="name_value"), + ) + + +def test_create_issue_model_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateIssueModelRequest, + dict, + ], +) +def test_update_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } + request_init["issue_model"] = { + "name": "projects/sample1/locations/sample2/issueModels/sample3", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "issue_count": 1201, + "state": 1, + "input_data_config": { + "medium": 1, + "training_conversations_count": 3025, + "filter": "filter_value", + }, + "training_stats": { + "analyzed_conversations_count": 3021, + "unclassified_conversations_count": 3439, + "issue_stats": {}, + }, + "model_type": 1, + "language_code": "language_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateIssueModelRequest.meta.fields[ + "issue_model" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_bulk_analyze_conversations_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.BulkDeleteConversationsRequest, - dict, - ], -) -def test_bulk_delete_conversations_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue_model"][field])): + del request_init["issue_model"][field][i][subfield] + else: + del request_init["issue_model"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel( + name="name_value", + display_name="display_name_value", + issue_count=1201, + state=resources.IssueModel.State.UNDEPLOYED, + model_type=resources.IssueModel.ModelType.TYPE_V1, + language_code="language_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_delete_conversations(request) + response = client.update_issue_model(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, resources.IssueModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.issue_count == 1201 + assert response.state == resources.IssueModel.State.UNDEPLOYED + assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 + assert response.language_code == "language_code_value" -def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): +def test_update_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21263,8 +24307,7 @@ def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.bulk_delete_conversations - in client._transport._wrapped_methods + client._transport.update_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -21273,33 +24316,28 @@ def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.bulk_delete_conversations + client._transport.update_issue_model ] = mock_rpc request = {} - client.bulk_delete_conversations(request) + client.update_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.bulk_delete_conversations(request) + client.update_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_bulk_delete_conversations_rest_required_fields( - request_type=contact_center_insights.BulkDeleteConversationsRequest, +def test_update_issue_model_rest_required_fields( + request_type=contact_center_insights.UpdateIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21310,21 +24348,19 @@ def test_bulk_delete_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) + ).update_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) + ).update_issue_model._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21333,7 +24369,7 @@ def test_bulk_delete_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21345,7 +24381,7 @@ def test_bulk_delete_conversations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21353,29 +24389,32 @@ def test_bulk_delete_conversations_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_delete_conversations(request) + response = client.update_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_bulk_delete_conversations_rest_unset_required_fields(): +def test_update_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.bulk_delete_conversations._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.update_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("issueModel",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_bulk_delete_conversations_rest_interceptors(null_interceptor): +def test_update_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21388,17 +24427,14 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_bulk_delete_conversations", + transports.ContactCenterInsightsRestInterceptor, "post_update_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_bulk_delete_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_update_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.BulkDeleteConversationsRequest.pb( - contact_center_insights.BulkDeleteConversationsRequest() + pb_message = contact_center_insights.UpdateIssueModelRequest.pb( + contact_center_insights.UpdateIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21410,19 +24446,17 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) - request = contact_center_insights.BulkDeleteConversationsRequest() + request = contact_center_insights.UpdateIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = 
resources.IssueModel() - client.bulk_delete_conversations( + client.update_issue_model( request, metadata=[ ("key", "val"), @@ -21434,9 +24468,9 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_bulk_delete_conversations_rest_bad_request( +def test_update_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.BulkDeleteConversationsRequest, + request_type=contact_center_insights.UpdateIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21444,7 +24478,11 @@ def test_bulk_delete_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21456,10 +24494,10 @@ def test_bulk_delete_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.bulk_delete_conversations(request) + client.update_issue_model(request) -def test_bulk_delete_conversations_rest_flattened(): +def test_update_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21468,39 +24506,45 @@ def test_bulk_delete_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - filter="filter_value", + issue_model=resources.IssueModel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.bulk_delete_conversations(**mock_args) + client.update_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:bulkDelete" + "%s/v1/{issue_model.name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest"): +def test_update_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21509,14 +24553,14 @@ def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.bulk_delete_conversations( - contact_center_insights.BulkDeleteConversationsRequest(), - parent="parent_value", - filter="filter_value", + client.update_issue_model( + contact_center_insights.UpdateIssueModelRequest(), + issue_model=resources.IssueModel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_bulk_delete_conversations_rest_error(): +def test_update_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21525,39 +24569,54 @@ def test_bulk_delete_conversations_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.IngestConversationsRequest, + contact_center_insights.GetIssueModelRequest, dict, ], ) -def test_ingest_conversations_rest(request_type): +def test_get_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel( + name="name_value", + display_name="display_name_value", + issue_count=1201, + state=resources.IssueModel.State.UNDEPLOYED, + model_type=resources.IssueModel.ModelType.TYPE_V1, + language_code="language_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ingest_conversations(request) + response = client.get_issue_model(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.IssueModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.issue_count == 1201 + assert response.state == resources.IssueModel.State.UNDEPLOYED + assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 + assert response.language_code == "language_code_value" -def test_ingest_conversations_rest_use_cached_wrapped_rpc(): +def test_get_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21571,43 +24630,35 @@ def test_ingest_conversations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.ingest_conversations in client._transport._wrapped_methods - ) + assert client._transport.get_issue_model in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.ingest_conversations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue_model] = mock_rpc request = {} - client.ingest_conversations(request) + client.get_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.ingest_conversations(request) + client.get_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_ingest_conversations_rest_required_fields( - request_type=contact_center_insights.IngestConversationsRequest, +def test_get_issue_model_rest_required_fields( + request_type=contact_center_insights.GetIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21618,21 +24669,21 @@ def test_ingest_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ingest_conversations._get_unset_required_fields(jsonified_request) + ).get_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ingest_conversations._get_unset_required_fields(jsonified_request) + ).get_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21641,7 +24692,7 @@ def 
test_ingest_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21653,37 +24704,39 @@ def test_ingest_conversations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ingest_conversations(request) + response = client.get_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_ingest_conversations_rest_unset_required_fields(): +def test_get_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.ingest_conversations._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.get_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_ingest_conversations_rest_interceptors(null_interceptor): +def test_get_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21696,16 +24749,14 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_ingest_conversations" + transports.ContactCenterInsightsRestInterceptor, "post_get_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_ingest_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_get_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.IngestConversationsRequest.pb( - contact_center_insights.IngestConversationsRequest() + pb_message = contact_center_insights.GetIssueModelRequest.pb( + contact_center_insights.GetIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21717,19 +24768,17 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) - request = contact_center_insights.IngestConversationsRequest() + request = contact_center_insights.GetIssueModelRequest() 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.IssueModel() - client.ingest_conversations( + client.get_issue_model( request, metadata=[ ("key", "val"), @@ -21741,9 +24790,8 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_ingest_conversations_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.IngestConversationsRequest, +def test_get_issue_model_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetIssueModelRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21751,7 +24799,7 @@ def test_ingest_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21763,10 +24811,10 @@ def test_ingest_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.ingest_conversations(request) + client.get_issue_model(request) -def test_ingest_conversations_rest_flattened(): +def test_get_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21775,38 +24823,42 @@ def test_ingest_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.ingest_conversations(**mock_args) + client.get_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:ingest" + "%s/v1/{name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): +def test_get_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21815,13 +24867,13 @@ def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.ingest_conversations( - contact_center_insights.IngestConversationsRequest(), - parent="parent_value", + client.get_issue_model( + contact_center_insights.GetIssueModelRequest(), + name="name_value", ) -def test_ingest_conversations_rest_error(): +def test_get_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21830,11 +24882,11 @@ def test_ingest_conversations_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ExportInsightsDataRequest, + contact_center_insights.ListIssueModelsRequest, dict, ], ) -def test_export_insights_data_rest(request_type): +def test_list_issue_models_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21847,22 +24899,24 @@ def test_export_insights_data_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_insights_data(request) + response = client.list_issue_models(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, contact_center_insights.ListIssueModelsResponse) -def test_export_insights_data_rest_use_cached_wrapped_rpc(): +def test_list_issue_models_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21876,9 +24930,7 @@ def test_export_insights_data_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.export_insights_data in client._transport._wrapped_methods - ) + assert client._transport.list_issue_models in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -21886,28 +24938,24 @@ def test_export_insights_data_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.export_insights_data + client._transport.list_issue_models ] = mock_rpc request = {} - client.export_insights_data(request) + client.list_issue_models(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_insights_data(request) + client.list_issue_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_insights_data_rest_required_fields( - request_type=contact_center_insights.ExportInsightsDataRequest, +def test_list_issue_models_rest_required_fields( + request_type=contact_center_insights.ListIssueModelsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -21923,7 +24971,7 @@ def test_export_insights_data_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_insights_data._get_unset_required_fields(jsonified_request) + ).list_issue_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21932,7 +24980,7 @@ def test_export_insights_data_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_insights_data._get_unset_required_fields(jsonified_request) + ).list_issue_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -21946,7 +24994,7 @@ def test_export_insights_data_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21958,37 +25006,41 @@ def test_export_insights_data_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_insights_data(request) + response = client.list_issue_models(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_insights_data_rest_unset_required_fields(): +def test_list_issue_models_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_insights_data._get_unset_required_fields({}) + unset_fields = transport.list_issue_models._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_insights_data_rest_interceptors(null_interceptor): +def test_list_issue_models_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22001,16 +25053,14 @@ def test_export_insights_data_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_export_insights_data" + transports.ContactCenterInsightsRestInterceptor, "post_list_issue_models" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_export_insights_data" + transports.ContactCenterInsightsRestInterceptor, "pre_list_issue_models" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ExportInsightsDataRequest.pb( - contact_center_insights.ExportInsightsDataRequest() + pb_message = contact_center_insights.ListIssueModelsRequest.pb( + contact_center_insights.ListIssueModelsRequest() ) transcode.return_value = { "method": "post", @@ -22022,19 +25072,21 @@ def test_export_insights_data_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + contact_center_insights.ListIssueModelsResponse.to_json( + contact_center_insights.ListIssueModelsResponse() + ) ) - request = contact_center_insights.ExportInsightsDataRequest() + request = contact_center_insights.ListIssueModelsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = contact_center_insights.ListIssueModelsResponse() - client.export_insights_data( + client.list_issue_models( 
request, metadata=[ ("key", "val"), @@ -22046,9 +25098,8 @@ def test_export_insights_data_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_insights_data_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.ExportInsightsDataRequest, +def test_list_issue_models_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.ListIssueModelsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22068,10 +25119,10 @@ def test_export_insights_data_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_insights_data(request) + client.list_issue_models(request) -def test_export_insights_data_rest_flattened(): +def test_list_issue_models_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22080,7 +25131,7 @@ def test_export_insights_data_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -22094,148 +25145,61 @@ def test_export_insights_data_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.export_insights_data(**mock_args) + client.list_issue_models(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/insightsdata:export" + "%s/v1/{parent=projects/*/locations/*}/issueModels" % client.transport._host, args[1], ) -def test_export_insights_data_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_insights_data( - contact_center_insights.ExportInsightsDataRequest(), - parent="parent_value", - ) - - -def test_export_insights_data_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CreateIssueModelRequest, - dict, - ], -) -def test_create_issue_model_rest(request_type): +def test_list_issue_models_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["issue_model"] = { - "name": "name_value", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "issue_count": 1201, - "state": 1, - "input_data_config": { - "medium": 1, - "training_conversations_count": 3025, - "filter": "filter_value", - }, - "training_stats": { - "analyzed_conversations_count": 3021, - "unclassified_conversations_count": 3439, - "issue_stats": {}, - }, - "model_type": 1, - "language_code": "language_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateIssueModelRequest.meta.fields[ - "issue_model" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_issue_models( + contact_center_insights.ListIssueModelsRequest(), + parent="parent_value", + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_list_issue_models_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue_model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeleteIssueModelRequest, + dict, + ], +) +def test_delete_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["issue_model"][field])): - del request_init["issue_model"][field][i][subfield] - else: - del request_init["issue_model"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22250,13 +25214,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_issue_model(request) + response = client.delete_issue_model(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_issue_model_rest_use_cached_wrapped_rpc(): +def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22271,7 +25235,7 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_issue_model in client._transport._wrapped_methods + client._transport.delete_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -22280,11 +25244,11 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_issue_model + client._transport.delete_issue_model ] = mock_rpc request = {} - client.create_issue_model(request) + client.delete_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -22293,20 +25257,20 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_issue_model(request) + client.delete_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_issue_model_rest_required_fields( - request_type=contact_center_insights.CreateIssueModelRequest, +def test_delete_issue_model_rest_required_fields( + request_type=contact_center_insights.DeleteIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22317,21 +25281,21 @@ def test_create_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_issue_model._get_unset_required_fields(jsonified_request) + ).delete_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_issue_model._get_unset_required_fields(jsonified_request) + ).delete_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22352,10 +25316,9 @@ def test_create_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22365,32 +25328,24 @@ def test_create_issue_model_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_issue_model(request) + response = client.delete_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_issue_model_rest_unset_required_fields(): +def test_delete_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "issueModel", - ) - ) - ) + unset_fields = transport.delete_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_issue_model_rest_interceptors(null_interceptor): +def test_delete_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22405,14 +25360,14 @@ def test_create_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_delete_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateIssueModelRequest.pb( - contact_center_insights.CreateIssueModelRequest() + pb_message = contact_center_insights.DeleteIssueModelRequest.pb( + contact_center_insights.DeleteIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -22428,7 +25383,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.CreateIssueModelRequest() + request = contact_center_insights.DeleteIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22436,7 +25391,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_issue_model( + client.delete_issue_model( request, metadata=[ ("key", "val"), @@ -22448,9 +25403,9 @@ def test_create_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_issue_model_rest_bad_request( +def test_delete_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CreateIssueModelRequest, + request_type=contact_center_insights.DeleteIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22458,7 +25413,7 @@ def test_create_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22470,10 +25425,10 @@ def test_create_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_issue_model(request) + client.delete_issue_model(request) -def test_create_issue_model_rest_flattened(): +def test_delete_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22485,12 +25440,13 @@ def test_create_issue_model_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - issue_model=resources.IssueModel(name="name_value"), + name="name_value", ) mock_args.update(sample_request) @@ -22501,20 +25457,20 @@ def test_create_issue_model_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_issue_model(**mock_args) + client.delete_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/issueModels" + "%s/v1/{name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_create_issue_model_rest_flattened_error(transport: str = "rest"): +def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22523,163 +25479,54 @@ def test_create_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_issue_model( - contact_center_insights.CreateIssueModelRequest(), - parent="parent_value", - issue_model=resources.IssueModel(name="name_value"), + client.delete_issue_model( + contact_center_insights.DeleteIssueModelRequest(), + name="name_value", ) -def test_create_issue_model_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateIssueModelRequest, - dict, - ], -) -def test_update_issue_model_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } - } - request_init["issue_model"] = { - "name": "projects/sample1/locations/sample2/issueModels/sample3", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "issue_count": 1201, - "state": 1, - "input_data_config": { - "medium": 1, - "training_conversations_count": 3025, - "filter": "filter_value", - }, - "training_stats": { - "analyzed_conversations_count": 3021, - "unclassified_conversations_count": 3439, - "issue_stats": {}, - }, - "model_type": 1, - "language_code": "language_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateIssueModelRequest.meta.fields[ - "issue_model" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue_model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_delete_issue_model_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["issue_model"][field])): - del request_init["issue_model"][field][i][subfield] - else: - del request_init["issue_model"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeployIssueModelRequest, + dict, + ], +) +def test_deploy_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.IssueModel( - name="name_value", - display_name="display_name_value", - issue_count=1201, - state=resources.IssueModel.State.UNDEPLOYED, - model_type=resources.IssueModel.ModelType.TYPE_V1, - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue_model(request) + response = client.deploy_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.IssueModel) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.issue_count == 1201 - assert response.state == resources.IssueModel.State.UNDEPLOYED - assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_update_issue_model_rest_use_cached_wrapped_rpc(): +def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22694,7 +25541,7 @@ def test_update_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_issue_model in client._transport._wrapped_methods + client._transport.deploy_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -22703,28 +25550,33 @@ def test_update_issue_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_issue_model + client._transport.deploy_issue_model ] = mock_rpc request = {} - client.update_issue_model(request) + client.deploy_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_issue_model(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.deploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_issue_model_rest_required_fields( - request_type=contact_center_insights.UpdateIssueModelRequest, +def test_deploy_issue_model_rest_required_fields( + request_type=contact_center_insights.DeployIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22735,19 +25587,21 @@ def test_update_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue_model._get_unset_required_fields(jsonified_request) + ).deploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue_model._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).deploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22756,7 +25610,7 @@ def test_update_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22768,7 +25622,7 @@ def test_update_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -22776,32 +25630,29 @@ def test_update_issue_model_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue_model(request) + response = client.deploy_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_issue_model_rest_unset_required_fields(): +def test_deploy_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("issueModel",))) + unset_fields = transport.deploy_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_issue_model_rest_interceptors(null_interceptor): +def test_deploy_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22814,14 +25665,16 @@ def test_update_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_issue_model" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_deploy_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_deploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateIssueModelRequest.pb( - contact_center_insights.UpdateIssueModelRequest() + pb_message = contact_center_insights.DeployIssueModelRequest.pb( + contact_center_insights.DeployIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -22833,17 +25686,19 @@ def test_update_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.UpdateIssueModelRequest() + request = contact_center_insights.DeployIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.IssueModel() + post.return_value = operations_pb2.Operation() - client.update_issue_model( + client.deploy_issue_model( request, metadata=[ ("key", 
"val"), @@ -22855,9 +25710,9 @@ def test_update_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_issue_model_rest_bad_request( +def test_deploy_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.UpdateIssueModelRequest, + request_type=contact_center_insights.DeployIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22865,11 +25720,7 @@ def test_update_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22881,10 +25732,10 @@ def test_update_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_issue_model(request) + client.deploy_issue_model(request) -def test_update_issue_model_rest_flattened(): +def test_deploy_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22893,45 +25744,40 @@ def test_update_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } + "name": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - issue_model=resources.IssueModel(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_issue_model(**mock_args) + client.deploy_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue_model.name=projects/*/locations/*/issueModels/*}" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:deploy" % client.transport._host, args[1], ) -def test_update_issue_model_rest_flattened_error(transport: str = "rest"): +def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22940,14 +25786,13 @@ def test_update_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_issue_model( - contact_center_insights.UpdateIssueModelRequest(), - issue_model=resources.IssueModel(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.deploy_issue_model( + contact_center_insights.DeployIssueModelRequest(), + name="name_value", ) -def test_update_issue_model_rest_error(): +def test_deploy_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22956,11 +25801,11 @@ def test_update_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueModelRequest, + contact_center_insights.UndeployIssueModelRequest, dict, ], ) -def test_get_issue_model_rest(request_type): +def test_undeploy_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22973,37 +25818,22 @@ def test_get_issue_model_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.IssueModel( - name="name_value", - display_name="display_name_value", - issue_count=1201, - state=resources.IssueModel.State.UNDEPLOYED, - model_type=resources.IssueModel.ModelType.TYPE_V1, - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue_model(request) + response = client.undeploy_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.IssueModel) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.issue_count == 1201 - assert response.state == resources.IssueModel.State.UNDEPLOYED - assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_get_issue_model_rest_use_cached_wrapped_rpc(): +def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23017,30 +25847,38 @@ def test_get_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue_model in client._transport._wrapped_methods + assert ( + client._transport.undeploy_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_issue_model] = mock_rpc + client._transport._wrapped_methods[ + client._transport.undeploy_issue_model + ] = mock_rpc request = {} - client.get_issue_model(request) + client.undeploy_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_issue_model(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.undeploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_issue_model_rest_required_fields( - request_type=contact_center_insights.GetIssueModelRequest, +def test_undeploy_issue_model_rest_required_fields( + request_type=contact_center_insights.UndeployIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -23056,7 +25894,7 @@ def test_get_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue_model._get_unset_required_fields(jsonified_request) + ).undeploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23065,7 +25903,7 @@ def test_get_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue_model._get_unset_required_fields(jsonified_request) + ).undeploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -23079,7 +25917,7 @@ def test_get_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23091,39 +25929,37 @@ def test_get_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue_model(request) + response = client.undeploy_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_issue_model_rest_unset_required_fields(): +def test_undeploy_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_issue_model._get_unset_required_fields({}) + unset_fields = transport.undeploy_issue_model._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_issue_model_rest_interceptors(null_interceptor): +def test_undeploy_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23136,14 +25972,16 @@ def test_get_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_issue_model" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_undeploy_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetIssueModelRequest.pb( - contact_center_insights.GetIssueModelRequest() + pb_message = contact_center_insights.UndeployIssueModelRequest.pb( + contact_center_insights.UndeployIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23155,17 +25993,19 @@ def test_get_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.GetIssueModelRequest() + request = contact_center_insights.UndeployIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.IssueModel() + post.return_value = operations_pb2.Operation() - client.get_issue_model( + client.undeploy_issue_model( request, metadata=[ ("key", "val"), @@ -23177,8 +26017,9 @@ def test_get_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_get_issue_model_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetIssueModelRequest +def test_undeploy_issue_model_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.UndeployIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23198,10 +26039,10 @@ def test_get_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_issue_model(request) + client.undeploy_issue_model(request) -def test_get_issue_model_rest_flattened(): +def test_undeploy_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23210,7 +26051,7 @@ def test_get_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -23226,26 +26067,24 @@ def test_get_issue_model_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_issue_model(**mock_args) + client.undeploy_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:undeploy" % client.transport._host, args[1], ) -def test_get_issue_model_rest_flattened_error(transport: str = "rest"): +def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23254,13 +26093,13 @@ def test_get_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_issue_model( - contact_center_insights.GetIssueModelRequest(), + client.undeploy_issue_model( + contact_center_insights.UndeployIssueModelRequest(), name="name_value", ) -def test_get_issue_model_rest_error(): +def test_undeploy_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23269,41 +26108,39 @@ def test_get_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssueModelsRequest, + contact_center_insights.ExportIssueModelRequest, dict, ], ) -def test_list_issue_models_rest(request_type): +def test_export_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issue_models(request) + response = client.export_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssueModelsResponse) + assert response.operation.name == "operations/spam" -def test_list_issue_models_rest_use_cached_wrapped_rpc(): +def test_export_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23317,7 +26154,9 @@ def test_list_issue_models_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issue_models in client._transport._wrapped_methods + assert ( + client._transport.export_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -23325,29 +26164,33 @@ def test_list_issue_models_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_issue_models + client._transport.export_issue_model ] = mock_rpc request = {} - client.list_issue_models(request) + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_issue_models(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_issue_models_rest_required_fields( - request_type=contact_center_insights.ListIssueModelsRequest, +def test_export_issue_model_rest_required_fields( + request_type=contact_center_insights.ExportIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23358,21 +26201,21 @@ def test_list_issue_models_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issue_models._get_unset_required_fields(jsonified_request) + ).export_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issue_models._get_unset_required_fields(jsonified_request) + ).export_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23381,7 +26224,7 @@ def test_list_issue_models_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23393,41 +26236,37 @@ def test_list_issue_models_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issue_models(request) + response = client.export_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_issue_models_rest_unset_required_fields(): +def test_export_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_issue_models._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.export_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_issue_models_rest_interceptors(null_interceptor): +def test_export_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23440,14 +26279,16 @@ def test_list_issue_models_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_issue_models" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_export_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_issue_models" + transports.ContactCenterInsightsRestInterceptor, "pre_export_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListIssueModelsRequest.pb( - contact_center_insights.ListIssueModelsRequest() + pb_message = contact_center_insights.ExportIssueModelRequest.pb( + contact_center_insights.ExportIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23459,21 +26300,19 @@ def test_list_issue_models_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListIssueModelsResponse.to_json( - contact_center_insights.ListIssueModelsResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = contact_center_insights.ListIssueModelsRequest() + request = contact_center_insights.ExportIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListIssueModelsResponse() + post.return_value = operations_pb2.Operation() - 
client.list_issue_models( + client.export_issue_model( request, metadata=[ ("key", "val"), @@ -23485,8 +26324,9 @@ def test_list_issue_models_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_issue_models_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListIssueModelsRequest +def test_export_issue_model_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.ExportIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23494,7 +26334,7 @@ def test_list_issue_models_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23506,10 +26346,10 @@ def test_list_issue_models_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_issue_models(request) + client.export_issue_model(request) -def test_list_issue_models_rest_flattened(): +def test_export_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23518,40 +26358,40 @@ def test_list_issue_models_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_issue_models(**mock_args) + client.export_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/issueModels" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:export" % client.transport._host, args[1], ) -def test_list_issue_models_rest_flattened_error(transport: str = "rest"): +def test_export_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23560,13 +26400,13 @@ def test_list_issue_models_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_issue_models( - contact_center_insights.ListIssueModelsRequest(), - parent="parent_value", + client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), + name="name_value", ) -def test_list_issue_models_rest_error(): +def test_export_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23575,18 +26415,18 @@ def test_list_issue_models_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueModelRequest, + contact_center_insights.ImportIssueModelRequest, dict, ], ) -def test_delete_issue_model_rest(request_type): +def test_import_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -23601,13 +26441,13 @@ def test_delete_issue_model_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue_model(request) + response = client.import_issue_model(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_issue_model_rest_use_cached_wrapped_rpc(): +def test_import_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23622,7 +26462,7 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_issue_model in client._transport._wrapped_methods + client._transport.import_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -23631,11 +26471,11 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_issue_model + client._transport.import_issue_model ] = mock_rpc request = {} - client.delete_issue_model(request) + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -23644,20 +26484,20 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_issue_model(request) + client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_issue_model_rest_required_fields( - request_type=contact_center_insights.DeleteIssueModelRequest, +def test_import_issue_model_rest_required_fields( + request_type=contact_center_insights.ImportIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23668,21 +26508,21 @@ def test_delete_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue_model._get_unset_required_fields(jsonified_request) + ).import_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue_model._get_unset_required_fields(jsonified_request) + ).import_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23703,9 +26543,10 @@ def test_delete_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -23715,24 +26556,24 @@ def test_delete_issue_model_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue_model(request) + response = client.import_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_issue_model_rest_unset_required_fields(): +def test_import_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_issue_model_rest_interceptors(null_interceptor): +def test_import_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23747,14 +26588,14 @@ def 
test_delete_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_delete_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_import_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_import_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.DeleteIssueModelRequest.pb( - contact_center_insights.DeleteIssueModelRequest() + pb_message = contact_center_insights.ImportIssueModelRequest.pb( + contact_center_insights.ImportIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23770,7 +26611,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.DeleteIssueModelRequest() + request = contact_center_insights.ImportIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -23778,7 +26619,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_issue_model( + client.import_issue_model( request, metadata=[ ("key", "val"), @@ -23790,9 +26631,9 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_issue_model_rest_bad_request( +def test_import_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.DeleteIssueModelRequest, + request_type=contact_center_insights.ImportIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23800,7 +26641,7 @@ def test_delete_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23812,10 +26653,10 @@ def test_delete_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_issue_model(request) + client.import_issue_model(request) -def test_delete_issue_model_rest_flattened(): +def test_import_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23827,13 +26668,11 @@ def test_delete_issue_model_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -23844,20 +26683,20 @@ def test_delete_issue_model_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_issue_model(**mock_args) + client.import_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}" + "%s/v1/{parent=projects/*/locations/*}/issueModels:import" % client.transport._host, args[1], ) -def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): +def test_import_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23866,13 +26705,13 @@ def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_issue_model( - contact_center_insights.DeleteIssueModelRequest(), - name="name_value", + client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), + parent="parent_value", ) -def test_delete_issue_model_rest_error(): +def test_import_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23881,39 +26720,52 @@ def test_delete_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeployIssueModelRequest, + contact_center_insights.GetIssueRequest, dict, ], ) -def test_deploy_issue_model_rest(request_type): +def test_get_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.deploy_issue_model(request) + response = client.get_issue(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): +def test_get_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23927,38 +26779,30 @@ def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.deploy_issue_model in client._transport._wrapped_methods - ) + assert client._transport.get_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.deploy_issue_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc request = {} - client.deploy_issue_model(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.deploy_issue_model(request) + client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_deploy_issue_model_rest_required_fields( - request_type=contact_center_insights.DeployIssueModelRequest, +def test_get_issue_rest_required_fields( + request_type=contact_center_insights.GetIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -23974,7 +26818,7 @@ def test_deploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).deploy_issue_model._get_unset_required_fields(jsonified_request) + ).get_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23983,7 +26827,7 @@ def test_deploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).deploy_issue_model._get_unset_required_fields(jsonified_request) + ).get_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -23997,7 +26841,7 @@ def test_deploy_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24009,37 +26853,39 @@ def test_deploy_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.deploy_issue_model(request) + response = client.get_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_deploy_issue_model_rest_unset_required_fields(): +def test_get_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.deploy_issue_model._get_unset_required_fields({}) + unset_fields = transport.get_issue._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deploy_issue_model_rest_interceptors(null_interceptor): +def test_get_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24052,16 +26898,14 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_deploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_get_issue" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_deploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_get_issue" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.DeployIssueModelRequest.pb( - contact_center_insights.DeployIssueModelRequest() + pb_message = contact_center_insights.GetIssueRequest.pb( + contact_center_insights.GetIssueRequest() ) transcode.return_value = { "method": "post", @@ -24073,19 +26917,17 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.Issue.to_json(resources.Issue()) - request = contact_center_insights.DeployIssueModelRequest() + request = contact_center_insights.GetIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.Issue() - client.deploy_issue_model( + client.get_issue( request, metadata=[ ("key", "val"), @@ -24097,9 +26939,8 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_deploy_issue_model_rest_bad_request( - transport: str = "rest", - 
request_type=contact_center_insights.DeployIssueModelRequest, +def test_get_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24107,7 +26948,9 @@ def test_deploy_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24119,10 +26962,10 @@ def test_deploy_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.deploy_issue_model(request) + client.get_issue(request) -def test_deploy_issue_model_rest_flattened(): +def test_get_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24131,11 +26974,11 @@ def test_deploy_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" } # get truthy value for each flattened field @@ -24147,24 +26990,26 @@ def test_deploy_issue_model_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.deploy_issue_model(**mock_args) + client.get_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}:deploy" + "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): +def test_get_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24173,13 +27018,13 @@ def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.deploy_issue_model( - contact_center_insights.DeployIssueModelRequest(), + client.get_issue( + contact_center_insights.GetIssueRequest(), name="name_value", ) -def test_deploy_issue_model_rest_error(): +def test_get_issue_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24188,39 +27033,41 @@ def test_deploy_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UndeployIssueModelRequest, + contact_center_insights.ListIssuesRequest, dict, ], ) -def test_undeploy_issue_model_rest(request_type): +def test_list_issues_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undeploy_issue_model(request) + response = client.list_issues(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, contact_center_insights.ListIssuesResponse) -def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): +def test_list_issues_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24234,43 +27081,35 @@ def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.undeploy_issue_model in client._transport._wrapped_methods - ) + assert client._transport.list_issues in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.undeploy_issue_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc request = {} - client.undeploy_issue_model(request) + client.list_issues(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.undeploy_issue_model(request) + client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_undeploy_issue_model_rest_required_fields( - request_type=contact_center_insights.UndeployIssueModelRequest, +def test_list_issues_rest_required_fields( + request_type=contact_center_insights.ListIssuesRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24281,21 +27120,21 @@ def test_undeploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undeploy_issue_model._get_unset_required_fields(jsonified_request) + ).list_issues._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undeploy_issue_model._get_unset_required_fields(jsonified_request) + ).list_issues._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24304,7 +27143,7 @@ def test_undeploy_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24316,37 +27155,39 @@ def test_undeploy_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undeploy_issue_model(request) + response = client.list_issues(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_undeploy_issue_model_rest_unset_required_fields(): +def test_list_issues_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.undeploy_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_issues._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undeploy_issue_model_rest_interceptors(null_interceptor): +def test_list_issues_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24359,16 +27200,14 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_undeploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_list_issues" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_list_issues" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UndeployIssueModelRequest.pb( - contact_center_insights.UndeployIssueModelRequest() + pb_message = contact_center_insights.ListIssuesRequest.pb( + contact_center_insights.ListIssuesRequest() ) transcode.return_value = { "method": "post", @@ -24380,19 +27219,19 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = contact_center_insights.ListIssuesResponse.to_json( + contact_center_insights.ListIssuesResponse() ) - request = contact_center_insights.UndeployIssueModelRequest() + request = contact_center_insights.ListIssuesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = contact_center_insights.ListIssuesResponse() - client.undeploy_issue_model( + client.list_issues( request, metadata=[ ("key", 
"val"), @@ -24404,9 +27243,8 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_undeploy_issue_model_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UndeployIssueModelRequest, +def test_list_issues_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.ListIssuesRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24414,7 +27252,7 @@ def test_undeploy_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24426,10 +27264,10 @@ def test_undeploy_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.undeploy_issue_model(request) + client.list_issues(request) -def test_undeploy_issue_model_rest_flattened(): +def test_list_issues_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24438,40 +27276,42 @@ def test_undeploy_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" + "parent": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.undeploy_issue_model(**mock_args) + client.list_issues(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}:undeploy" + "%s/v1/{parent=projects/*/locations/*/issueModels/*}/issues" % client.transport._host, args[1], ) -def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): +def test_list_issues_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24480,13 +27320,13 @@ def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.undeploy_issue_model( - contact_center_insights.UndeployIssueModelRequest(), - name="name_value", + client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) -def test_undeploy_issue_model_rest_error(): +def test_list_issues_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24495,11 +27335,11 @@ def test_undeploy_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueRequest, + contact_center_insights.UpdateIssueRequest, dict, ], ) -def test_get_issue_rest(request_type): +def test_update_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24507,8 +27347,85 @@ def test_get_issue_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } + request_init["issue"] = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "sample_utterances": ["sample_utterances_value1", "sample_utterances_value2"], + "display_description": "display_description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateIssueRequest.meta.fields["issue"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue"][field])): + del request_init["issue"][field][i][subfield] + else: + del request_init["issue"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -24518,6 +27435,7 @@ def test_get_issue_rest(request_type): name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) # Wrap the value into a proper Response obj @@ -24529,16 +27447,17 @@ def test_get_issue_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue(request) + response = client.update_issue(request) # Establish that the response is the type that we expect. 
assert isinstance(response, resources.Issue) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_get_issue_rest_use_cached_wrapped_rpc(): +def test_update_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24552,35 +27471,34 @@ def test_get_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue in client._transport._wrapped_methods + assert client._transport.update_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc request = {} - client.get_issue(request) + client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_issue(request) + client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_issue_rest_required_fields( - request_type=contact_center_insights.GetIssueRequest, +def test_update_issue_rest_required_fields( + request_type=contact_center_insights.UpdateIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24591,21 +27509,19 @@ def test_get_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue._get_unset_required_fields(jsonified_request) + ).update_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" + # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue._get_unset_required_fields(jsonified_request) + ).update_issue._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24626,9 +27542,10 @@ def test_get_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -24641,24 +27558,24 @@ def test_get_issue_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue(request) + response = client.update_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_issue_rest_unset_required_fields(): +def test_update_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_issue._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("issue",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_issue_rest_interceptors(null_interceptor): +def test_update_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24671,14 +27588,14 @@ def test_get_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_issue" + transports.ContactCenterInsightsRestInterceptor, "post_update_issue" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_issue" + transports.ContactCenterInsightsRestInterceptor, "pre_update_issue" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetIssueRequest.pb( - contact_center_insights.GetIssueRequest() + pb_message = contact_center_insights.UpdateIssueRequest.pb( + contact_center_insights.UpdateIssueRequest() ) transcode.return_value = { "method": "post", @@ -24692,7 +27609,7 @@ def test_get_issue_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = resources.Issue.to_json(resources.Issue()) - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.UpdateIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -24700,7 +27617,7 @@ def test_get_issue_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = resources.Issue() - client.get_issue( + client.update_issue( request, metadata=[ ("key", "val"), @@ -24712,8 +27629,8 @@ def test_get_issue_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetIssueRequest +def test_update_issue_rest_bad_request( + transport: str = "rest", 
request_type=contact_center_insights.UpdateIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24722,7 +27639,9 @@ def test_get_issue_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } request = request_type(**request_init) @@ -24735,10 +27654,10 @@ def test_get_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_issue(request) + client.update_issue(request) -def test_get_issue_rest_flattened(): +def test_update_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24751,12 +27670,15 @@ def test_get_issue_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -24769,20 +27691,20 @@ def test_get_issue_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_issue(**mock_args) + client.update_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{issue.name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_get_issue_rest_flattened_error(transport: str = "rest"): +def test_update_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24791,13 +27713,14 @@ def test_get_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_issue( - contact_center_insights.GetIssueRequest(), - name="name_value", + client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_issue_rest_error(): +def test_update_issue_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24806,41 +27729,41 @@ def test_get_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssuesRequest, + contact_center_insights.DeleteIssueRequest, dict, ], ) -def test_list_issues_rest(request_type): +def test_delete_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssuesResponse() + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issues(request) + response = client.delete_issue(request) # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert response is None -def test_list_issues_rest_use_cached_wrapped_rpc(): +def test_delete_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24854,35 +27777,35 @@ def test_list_issues_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issues in client._transport._wrapped_methods + assert client._transport.delete_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc request = {} - client.list_issues(request) + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_issues(request) + client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_issues_rest_required_fields( - request_type=contact_center_insights.ListIssuesRequest, +def test_delete_issue_rest_required_fields( + request_type=contact_center_insights.DeleteIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24893,21 +27816,21 @@ def test_list_issues_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issues._get_unset_required_fields(jsonified_request) + ).delete_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issues._get_unset_required_fields(jsonified_request) + ).delete_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24916,7 +27839,7 @@ def test_list_issues_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssuesResponse() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24928,39 +27851,36 @@ def test_list_issues_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issues(request) + response = client.delete_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_issues_rest_unset_required_fields(): +def test_delete_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_issues._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.delete_issue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_issues_rest_interceptors(null_interceptor): +def test_delete_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24973,14 +27893,11 @@ def test_list_issues_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_issues" - ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_issues" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.ListIssuesRequest.pb( - contact_center_insights.ListIssuesRequest() + pb_message = contact_center_insights.DeleteIssueRequest.pb( + contact_center_insights.DeleteIssueRequest() ) transcode.return_value = { "method": "post", @@ -24992,19 +27909,15 @@ def test_list_issues_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = contact_center_insights.ListIssuesResponse.to_json( - contact_center_insights.ListIssuesResponse() - ) - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.DeleteIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListIssuesResponse() - client.list_issues( + client.delete_issue( request, metadata=[ ("key", "val"), @@ -25013,11 +27926,10 @@ def test_list_issues_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_issues_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListIssuesRequest +def test_delete_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.DeleteIssueRequest 
): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25025,7 +27937,9 @@ def test_list_issues_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25037,10 +27951,10 @@ def test_list_issues_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_issues(request) + client.delete_issue(request) -def test_list_issues_rest_flattened(): +def test_delete_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25049,42 +27963,40 @@ def test_list_issues_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssuesResponse() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_issues(**mock_args) + client.delete_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/issueModels/*}/issues" + "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_list_issues_rest_flattened_error(transport: str = "rest"): +def test_delete_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25093,141 +28005,62 @@ def test_list_issues_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_issues( - contact_center_insights.ListIssuesRequest(), - parent="parent_value", - ) - - -def test_list_issues_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateIssueRequest, - dict, - ], -) -def test_update_issue_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } - } - request_init["issue"] = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "sample_utterances": ["sample_utterances_value1", "sample_utterances_value2"], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateIssueRequest.meta.fields["issue"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["issue"][field])): - del request_init["issue"][field][i][subfield] - else: - del request_init["issue"][field][subfield] +def test_delete_issue_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.CalculateIssueModelStatsRequest, + dict, + ], +) +def test_calculate_issue_model_stats_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue(request) + response = client.calculate_issue_model_stats(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) -def test_update_issue_rest_use_cached_wrapped_rpc(): +def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25241,34 +28074,40 @@ def test_update_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_issue in client._transport._wrapped_methods + assert ( + client._transport.calculate_issue_model_stats + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.calculate_issue_model_stats + ] = mock_rpc request = {} - client.update_issue(request) + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_issue(request) + client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_issue_rest_required_fields( - request_type=contact_center_insights.UpdateIssueRequest, +def test_calculate_issue_model_stats_rest_required_fields( + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["issue_model"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25279,19 +28118,21 @@ def test_update_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue._get_unset_required_fields(jsonified_request) + ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["issueModel"] = "issue_model_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "issueModel" in jsonified_request + assert jsonified_request["issueModel"] == "issue_model_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25300,7 +28141,7 @@ def test_update_issue_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Issue() + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25312,40 +28153,41 @@ def test_update_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue(request) + response = client.calculate_issue_model_stats(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_issue_rest_unset_required_fields(): +def test_calculate_issue_model_stats_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("issue",))) + unset_fields = transport.calculate_issue_model_stats._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("issueModel",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_issue_rest_interceptors(null_interceptor): +def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25358,14 +28200,16 @@ def test_update_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_issue" + transports.ContactCenterInsightsRestInterceptor, + "post_calculate_issue_model_stats", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_issue" + transports.ContactCenterInsightsRestInterceptor, + "pre_calculate_issue_model_stats", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateIssueRequest.pb( - contact_center_insights.UpdateIssueRequest() + pb_message = contact_center_insights.CalculateIssueModelStatsRequest.pb( + contact_center_insights.CalculateIssueModelStatsRequest() ) transcode.return_value = { "method": "post", @@ -25377,17 +28221,21 @@ def test_update_issue_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Issue.to_json(resources.Issue()) + req.return_value._content = ( + contact_center_insights.CalculateIssueModelStatsResponse.to_json( + contact_center_insights.CalculateIssueModelStatsResponse() + ) + ) - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Issue() + post.return_value = 
contact_center_insights.CalculateIssueModelStatsResponse() - client.update_issue( + client.calculate_issue_model_stats( request, metadata=[ ("key", "val"), @@ -25399,8 +28247,9 @@ def test_update_issue_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.UpdateIssueRequest +def test_calculate_issue_model_stats_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25409,9 +28258,7 @@ def test_update_issue_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" } request = request_type(**request_init) @@ -25424,10 +28271,10 @@ def test_update_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_issue(request) + client.calculate_issue_model_stats(request) -def test_update_issue_rest_flattened(): +def test_calculate_issue_model_stats_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25436,19 +28283,16 @@ def test_update_issue_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Issue() + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + issue_model="issue_model_value", ) mock_args.update(sample_request) @@ -25456,25 +28300,27 @@ def test_update_issue_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_issue(**mock_args) + client.calculate_issue_model_stats(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue.name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{issue_model=projects/*/locations/*/issueModels/*}:calculateIssueModelStats" % client.transport._host, args[1], ) -def test_update_issue_rest_flattened_error(transport: str = "rest"): +def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25483,14 +28329,13 @@ def test_update_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) -def test_update_issue_rest_error(): +def test_calculate_issue_model_stats_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25499,41 +28344,149 @@ def test_update_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueRequest, + contact_center_insights.CreatePhraseMatcherRequest, dict, ], ) -def test_delete_issue_rest(request_type): +def test_create_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["phrase_matcher"] = { + "name": "name_value", + "revision_id": "revision_id_value", + "version_tag": "version_tag_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "display_name": "display_name_value", + "type_": 1, + "active": True, + "phrase_match_rule_groups": [ + { + "type_": 1, + "phrase_match_rules": [ + { + "query": "query_value", + "negated": True, + "config": {"exact_match_config": {"case_sensitive": True}}, + } + ], + } + ], + "activation_update_time": {}, + "role_match": 1, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreatePhraseMatcherRequest.meta.fields[ + "phrase_matcher" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["phrase_matcher"][field])): + del request_init["phrase_matcher"][field][i][subfield] + else: + del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue(request) + response = client.create_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_delete_issue_rest_use_cached_wrapped_rpc(): +def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25547,35 +28500,40 @@ def test_delete_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_issue in client._transport._wrapped_methods + assert ( + client._transport.create_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_phrase_matcher + ] = mock_rpc request = {} - client.delete_issue(request) + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_issue(request) + client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_issue_rest_required_fields( - request_type=contact_center_insights.DeleteIssueRequest, +def test_create_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25586,21 +28544,21 @@ def test_delete_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue._get_unset_required_fields(jsonified_request) + ).create_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue._get_unset_required_fields(jsonified_request) + ).create_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25609,7 +28567,7 @@ def test_delete_issue_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25621,36 +28579,48 @@ def test_delete_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue(request) + response = client.create_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_issue_rest_unset_required_fields(): +def test_create_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "phraseMatcher", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_issue_rest_interceptors(null_interceptor): +def test_create_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25663,11 +28633,14 @@ def test_delete_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue" + transports.ContactCenterInsightsRestInterceptor, "post_create_phrase_matcher" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_create_phrase_matcher" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteIssueRequest.pb( - contact_center_insights.DeleteIssueRequest() + post.assert_not_called() + pb_message = contact_center_insights.CreatePhraseMatcherRequest.pb( + contact_center_insights.CreatePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -25679,15 +28652,19 @@ def test_delete_issue_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() + ) - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.PhraseMatcher() - client.delete_issue( + client.create_phrase_matcher( request, metadata=[ ("key", "val"), @@ -25696,10 +28673,12 @@ def test_delete_issue_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def 
test_delete_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.DeleteIssueRequest +def test_create_phrase_matcher_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25707,9 +28686,7 @@ def test_delete_issue_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25721,10 +28698,10 @@ def test_delete_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_issue(request) + client.create_phrase_matcher(request) -def test_delete_issue_rest_flattened(): +def test_create_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25733,40 +28710,41 @@ def test_delete_issue_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_issue(**mock_args) + client.create_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{parent=projects/*/locations/*}/phraseMatchers" % client.transport._host, args[1], ) -def test_delete_issue_rest_flattened_error(transport: str = "rest"): +def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25775,13 +28753,14 @@ def test_delete_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), + parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_delete_issue_rest_error(): +def test_create_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25790,47 +28769,56 @@ def test_delete_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CalculateIssueModelStatsRequest, + contact_center_insights.GetPhraseMatcherRequest, dict, ], ) -def test_calculate_issue_model_stats_rest(request_type): +def test_get_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_issue_model_stats(request) + response = client.get_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): +def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25845,8 +28833,7 @@ def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.calculate_issue_model_stats - in client._transport._wrapped_methods + client._transport.get_phrase_matcher in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -25855,29 +28842,29 @@ def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.calculate_issue_model_stats + client._transport.get_phrase_matcher ] = mock_rpc request = {} - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_calculate_issue_model_stats_rest_required_fields( - request_type=contact_center_insights.CalculateIssueModelStatsRequest, +def test_get_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.GetPhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["issue_model"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25888,21 +28875,21 @@ def test_calculate_issue_model_stats_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) + ).get_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["issueModel"] = "issue_model_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) + ).get_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "issueModel" in jsonified_request - assert jsonified_request["issueModel"] == "issue_model_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-25911,7 +28898,7 @@ def test_calculate_issue_model_stats_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25932,32 +28919,30 @@ def test_calculate_issue_model_stats_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_issue_model_stats(request) + response = client.get_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_calculate_issue_model_stats_rest_unset_required_fields(): +def test_get_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.calculate_issue_model_stats._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("issueModel",))) + unset_fields = transport.get_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): +def test_get_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25970,16 +28955,14 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_calculate_issue_model_stats", + transports.ContactCenterInsightsRestInterceptor, "post_get_phrase_matcher" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "pre_calculate_issue_model_stats", + transports.ContactCenterInsightsRestInterceptor, "pre_get_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CalculateIssueModelStatsRequest.pb( - contact_center_insights.CalculateIssueModelStatsRequest() + pb_message = contact_center_insights.GetPhraseMatcherRequest.pb( + contact_center_insights.GetPhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -25991,21 +28974,19 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.CalculateIssueModelStatsResponse.to_json( - contact_center_insights.CalculateIssueModelStatsResponse() - ) + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() ) - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.GetPhraseMatcherRequest() 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + post.return_value = resources.PhraseMatcher() - client.calculate_issue_model_stats( + client.get_phrase_matcher( request, metadata=[ ("key", "val"), @@ -26017,9 +28998,9 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): post.assert_called_once() -def test_calculate_issue_model_stats_rest_bad_request( +def test_get_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CalculateIssueModelStatsRequest, + request_type=contact_center_insights.GetPhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26027,9 +29008,7 @@ def test_calculate_issue_model_stats_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26041,10 +29020,10 @@ def test_calculate_issue_model_stats_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) -def test_calculate_issue_model_stats_rest_flattened(): +def test_get_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26053,16 +29032,16 @@ def test_calculate_issue_model_stats_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method sample_request = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" } # get truthy value for each flattened field mock_args = dict( - issue_model="issue_model_value", + name="name_value", ) mock_args.update(sample_request) @@ -26070,27 +29049,25 @@ def test_calculate_issue_model_stats_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.calculate_issue_model_stats(**mock_args) + client.get_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue_model=projects/*/locations/*/issueModels/*}:calculateIssueModelStats" + "%s/v1/{name=projects/*/locations/*/phraseMatchers/*}" % client.transport._host, args[1], ) -def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest"): +def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26099,13 +29076,13 @@ def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", + client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), + name="name_value", ) -def test_calculate_issue_model_stats_rest_error(): +def test_get_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -26114,11 +29091,11 @@ def test_calculate_issue_model_stats_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreatePhraseMatcherRequest, + contact_center_insights.ListPhraseMatchersRequest, dict, ], ) -def test_create_phrase_matcher_rest(request_type): +def test_list_phrase_matchers_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26126,137 +29103,34 @@ def test_create_phrase_matcher_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["phrase_matcher"] = { - "name": "name_value", - "revision_id": "revision_id_value", - "version_tag": "version_tag_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - "display_name": "display_name_value", - "type_": 1, - "active": True, - "phrase_match_rule_groups": [ - { - "type_": 1, - "phrase_match_rules": [ - { - "query": "query_value", - "negated": True, - "config": {"exact_match_config": {"case_sensitive": True}}, - } - ], - } - ], - "activation_update_time": {}, - "role_match": 1, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreatePhraseMatcherRequest.meta.fields[ - "phrase_matcher" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["phrase_matcher"][field])): - del request_init["phrase_matcher"][field][i][subfield] - else: - del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + return_value = contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_phrase_matcher(request) + response = client.list_phrase_matchers(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersPager) + assert response.next_page_token == "next_page_token_value" -def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26271,8 +29145,7 @@ def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_phrase_matcher - in client._transport._wrapped_methods + client._transport.list_phrase_matchers in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26281,24 +29154,24 @@ def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_phrase_matcher + client._transport.list_phrase_matchers ] = mock_rpc request = {} - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.CreatePhraseMatcherRequest, +def test_list_phrase_matchers_rest_required_fields( + request_type=contact_center_insights.ListPhraseMatchersRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -26314,7 +29187,7 @@ def test_create_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_phrase_matcher._get_unset_required_fields(jsonified_request) + ).list_phrase_matchers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26323,7 +29196,15 @@ def test_create_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_phrase_matcher._get_unset_required_fields(jsonified_request) + ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -26337,7 +29218,7 @@ def test_create_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = contact_center_insights.ListPhraseMatchersResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26349,48 +29230,50 @@ def test_create_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_phrase_matcher(request) + response = client.list_phrase_matchers(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_phrase_matcher_rest_unset_required_fields(): +def test_list_phrase_matchers_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_phrase_matcher._get_unset_required_fields({}) + unset_fields = transport.list_phrase_matchers._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "phraseMatcher", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_phrase_matcher_rest_interceptors(null_interceptor): +def test_list_phrase_matchers_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26403,14 +29286,14 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_list_phrase_matchers" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_list_phrase_matchers" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreatePhraseMatcherRequest.pb( - contact_center_insights.CreatePhraseMatcherRequest() + pb_message = contact_center_insights.ListPhraseMatchersRequest.pb( + contact_center_insights.ListPhraseMatchersRequest() ) transcode.return_value = { "method": "post", @@ -26422,19 +29305,21 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() + req.return_value._content = ( + contact_center_insights.ListPhraseMatchersResponse.to_json( + contact_center_insights.ListPhraseMatchersResponse() + ) ) - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() 
+ post.return_value = contact_center_insights.ListPhraseMatchersResponse() - client.create_phrase_matcher( + client.list_phrase_matchers( request, metadata=[ ("key", "val"), @@ -26446,9 +29331,9 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_phrase_matcher_rest_bad_request( +def test_list_phrase_matchers_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CreatePhraseMatcherRequest, + request_type=contact_center_insights.ListPhraseMatchersRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26468,10 +29353,10 @@ def test_create_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) -def test_create_phrase_matcher_rest_flattened(): +def test_list_phrase_matchers_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26480,7 +29365,7 @@ def test_create_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = contact_center_insights.ListPhraseMatchersResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -26488,7 +29373,6 @@ def test_create_phrase_matcher_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), ) mock_args.update(sample_request) @@ -26496,12 +29380,14 @@ def test_create_phrase_matcher_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_phrase_matcher(**mock_args) + client.list_phrase_matchers(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -26514,7 +29400,7 @@ def test_create_phrase_matcher_rest_flattened(): ) -def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_list_phrase_matchers_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26523,27 +29409,84 @@ def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), + client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_create_phrase_matcher_rest_error(): +def test_list_phrase_matchers_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + contact_center_insights.ListPhraseMatchersResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_phrase_matchers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in results) + + pages = list(client.list_phrase_matchers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetPhraseMatcherRequest, + contact_center_insights.DeletePhraseMatcherRequest, dict, ], ) -def test_get_phrase_matcher_rest(request_type): +def test_delete_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26556,39 +29499,22 @@ def test_get_phrase_matcher_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_phrase_matcher(request) + response = client.delete_phrase_matcher(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None -def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26603,7 +29529,8 @@ def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_phrase_matcher in client._transport._wrapped_methods + client._transport.delete_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26612,24 +29539,24 @@ def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_phrase_matcher + client._transport.delete_phrase_matcher ] = mock_rpc request = {} - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.GetPhraseMatcherRequest, +def test_delete_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -26645,7 +29572,7 @@ def test_get_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_phrase_matcher._get_unset_required_fields(jsonified_request) + ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26654,7 +29581,7 @@ def test_get_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_phrase_matcher._get_unset_required_fields(jsonified_request) + ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -26668,7 +29595,7 @@ def test_get_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26680,39 +29607,36 @@ def test_get_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_phrase_matcher(request) + response = client.delete_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_phrase_matcher_rest_unset_required_fields(): +def test_delete_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_phrase_matcher._get_unset_required_fields({}) + unset_fields = transport.delete_phrase_matcher._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_phrase_matcher_rest_interceptors(null_interceptor): +def test_delete_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26725,14 +29649,11 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, 
mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_phrase_matcher" - ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_phrase_matcher" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.GetPhraseMatcherRequest.pb( - contact_center_insights.GetPhraseMatcherRequest() + pb_message = contact_center_insights.DeletePhraseMatcherRequest.pb( + contact_center_insights.DeletePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -26744,19 +29665,15 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() - ) - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() - client.get_phrase_matcher( + client.delete_phrase_matcher( request, metadata=[ ("key", "val"), @@ -26765,12 +29682,11 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_phrase_matcher_rest_bad_request( +def test_delete_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.GetPhraseMatcherRequest, + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26790,10 +29706,10 @@ def test_get_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) -def test_get_phrase_matcher_rest_flattened(): +def test_delete_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26802,7 +29718,7 @@ def test_get_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { @@ -26818,13 +29734,11 @@ def test_get_phrase_matcher_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_phrase_matcher(**mock_args) + client.delete_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -26837,7 +29751,7 @@ def test_get_phrase_matcher_rest_flattened(): ) -def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26846,61 +29760,168 @@ def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), + client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), name="name_value", ) -def test_get_phrase_matcher_rest_error(): +def test_delete_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.ListPhraseMatchersRequest, - dict, - ], -) -def test_list_phrase_matchers_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdatePhraseMatcherRequest, + dict, + ], +) +def test_update_phrase_matcher_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } + request_init["phrase_matcher"] = { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3", + "revision_id": "revision_id_value", + "version_tag": "version_tag_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "display_name": "display_name_value", + "type_": 1, + "active": True, + "phrase_match_rule_groups": [ + { + "type_": 1, + "phrase_match_rules": [ + { + "query": "query_value", + "negated": True, + "config": {"exact_match_config": {"case_sensitive": True}}, + } + ], + } + ], + "activation_update_time": {}, + "role_match": 1, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdatePhraseMatcherRequest.meta.fields[ + "phrase_matcher" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["phrase_matcher"][field])): + del request_init["phrase_matcher"][field][i][subfield] + else: + del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_phrase_matchers(request) + response = client.update_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): +def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26915,7 +29936,8 @@ def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_phrase_matchers in client._transport._wrapped_methods + client._transport.update_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26924,29 +29946,28 @@ def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_phrase_matchers + client._transport.update_phrase_matcher ] = mock_rpc request = {} - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_phrase_matchers_rest_required_fields( - request_type=contact_center_insights.ListPhraseMatchersRequest, +def test_update_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -26957,29 +29978,19 @@ def test_list_phrase_matchers_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + ).update_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + ).update_phrase_matcher._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26988,7 +29999,7 @@ def test_list_phrase_matchers_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse() + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27000,50 +30011,40 @@ def test_list_phrase_matchers_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_phrase_matchers(request) + response = client.update_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_phrase_matchers_rest_unset_required_fields(): +def test_update_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_phrase_matchers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.update_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("phraseMatcher",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_phrase_matchers_rest_interceptors(null_interceptor): +def test_update_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27056,14 +30057,14 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_phrase_matchers" + transports.ContactCenterInsightsRestInterceptor, "post_update_phrase_matcher" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_phrase_matchers" + transports.ContactCenterInsightsRestInterceptor, "pre_update_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListPhraseMatchersRequest.pb( - contact_center_insights.ListPhraseMatchersRequest() + pb_message = 
contact_center_insights.UpdatePhraseMatcherRequest.pb( + contact_center_insights.UpdatePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -27075,21 +30076,19 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListPhraseMatchersResponse.to_json( - contact_center_insights.ListPhraseMatchersResponse() - ) + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() ) - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListPhraseMatchersResponse() + post.return_value = resources.PhraseMatcher() - client.list_phrase_matchers( + client.update_phrase_matcher( request, metadata=[ ("key", "val"), @@ -27101,9 +30100,9 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_phrase_matchers_rest_bad_request( +def test_update_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.ListPhraseMatchersRequest, + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27111,7 +30110,11 @@ def test_list_phrase_matchers_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27123,10 +30126,10 @@ def test_list_phrase_matchers_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) -def test_list_phrase_matchers_rest_flattened(): +def test_update_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27135,14 +30138,19 @@ def test_list_phrase_matchers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListPhraseMatchersResponse() + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -27150,141 +30158,89 @@ def test_list_phrase_matchers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_phrase_matchers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/phraseMatchers" - % client.transport._host, - args[1], - ) - - -def test_list_phrase_matchers_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), - parent="parent_value", - ) - - -def test_list_phrase_matchers_rest_pager(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - ) - # Two responses for two calls - response = response + response + req.return_value = response_value - # Wrap the values into proper Response objs - response = tuple( - contact_center_insights.ListPhraseMatchersResponse.to_json(x) - for x in response + client.update_phrase_matcher(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{phrase_matcher.name=projects/*/locations/*/phraseMatchers/*}" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_phrase_matchers(request=sample_request) +def test_update_phrase_matcher_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - pages = list(client.list_phrase_matchers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_update_phrase_matcher_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeletePhraseMatcherRequest, + contact_center_insights.CalculateStatsRequest, dict, ], ) -def test_delete_phrase_matcher_rest(request_type): +def test_calculate_stats_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} + request_init = {"location": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_phrase_matcher(request) + response = client.calculate_stats(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 -def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_calculate_stats_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27298,40 +30254,35 @@ def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.calculate_stats in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc request = {} - client.delete_phrase_matcher(request) + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_phrase_matcher(request) + client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.DeletePhraseMatcherRequest, +def test_calculate_stats_rest_required_fields( + request_type=contact_center_insights.CalculateStatsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["location"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27342,21 +30293,23 @@ def test_delete_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) + ).calculate_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["location"] = "location_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) + ).calculate_stats._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27365,7 +30318,7 @@ def test_delete_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = contact_center_insights.CalculateStatsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27377,36 +30330,41 @@ def test_delete_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_phrase_matcher(request) + response = client.calculate_stats(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_phrase_matcher_rest_unset_required_fields(): +def test_calculate_stats_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_phrase_matcher._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.calculate_stats._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter",)) & set(("location",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_phrase_matcher_rest_interceptors(null_interceptor): +def test_calculate_stats_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27419,11 +30377,14 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_calculate_stats" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_calculate_stats" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeletePhraseMatcherRequest.pb( - contact_center_insights.DeletePhraseMatcherRequest() + post.assert_not_called() + pb_message = contact_center_insights.CalculateStatsRequest.pb( + contact_center_insights.CalculateStatsRequest() ) transcode.return_value = { "method": "post", @@ -27435,15 +30396,21 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + contact_center_insights.CalculateStatsResponse.to_json( + contact_center_insights.CalculateStatsResponse() + ) + ) - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.CalculateStatsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = contact_center_insights.CalculateStatsResponse() - client.delete_phrase_matcher( + client.calculate_stats( request, metadata=[ ("key", "val"), @@ -27452,11 +30419,11 @@ def 
test_delete_phrase_matcher_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_phrase_matcher_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeletePhraseMatcherRequest, +def test_calculate_stats_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.CalculateStatsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27464,7 +30431,7 @@ def test_delete_phrase_matcher_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} + request_init = {"location": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27476,10 +30443,10 @@ def test_delete_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_phrase_matcher(request) + client.calculate_stats(request) -def test_delete_phrase_matcher_rest_flattened(): +def test_calculate_stats_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27488,40 +30455,40 @@ def test_delete_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = contact_center_insights.CalculateStatsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } + sample_request = {"location": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + location="location_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_phrase_matcher(**mock_args) + client.calculate_stats(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/phraseMatchers/*}" + "%s/v1/{location=projects/*/locations/*}/conversations:calculateStats" % client.transport._host, args[1], ) -def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_calculate_stats_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27530,168 +30497,61 @@ def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), - name="name_value", - ) - - -def test_delete_phrase_matcher_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdatePhraseMatcherRequest, - dict, - ], -) -def test_update_phrase_matcher_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } - request_init["phrase_matcher"] = { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3", - "revision_id": "revision_id_value", - "version_tag": "version_tag_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - "display_name": "display_name_value", - "type_": 1, - "active": True, - "phrase_match_rule_groups": [ - { - "type_": 1, - "phrase_match_rules": [ - { - "query": "query_value", - "negated": True, - "config": {"exact_match_config": {"case_sensitive": True}}, - } - ], - } - ], - "activation_update_time": {}, - "role_match": 1, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdatePhraseMatcherRequest.meta.fields[ - "phrase_matcher" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_calculate_stats_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["phrase_matcher"][field])): - del request_init["phrase_matcher"][field][i][subfield] - else: - del request_init["phrase_matcher"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetSettingsRequest, + dict, + ], +) +def test_get_settings_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.PhraseMatcher( + return_value = resources.Settings( name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + language_code="language_code_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_phrase_matcher(request) + response = client.get_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response.language_code == "language_code_value" -def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_get_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27705,39 +30565,35 @@ def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.get_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} - client.update_phrase_matcher(request) + client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_phrase_matcher(request) + client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.UpdatePhraseMatcherRequest, +def test_get_settings_rest_required_fields( + request_type=contact_center_insights.GetSettingsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27748,19 +30604,21 @@ def test_update_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_phrase_matcher._get_unset_required_fields(jsonified_request) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_phrase_matcher._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27769,7 +30627,7 @@ def test_update_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = resources.Settings() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27781,40 +30639,39 @@ def test_update_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_phrase_matcher(request) + response = client.get_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_phrase_matcher_rest_unset_required_fields(): +def test_get_settings_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_phrase_matcher._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("phraseMatcher",))) + unset_fields = transport.get_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_phrase_matcher_rest_interceptors(null_interceptor): +def test_get_settings_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27827,14 +30684,14 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_get_settings" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_get_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdatePhraseMatcherRequest.pb( - contact_center_insights.UpdatePhraseMatcherRequest() + pb_message = contact_center_insights.GetSettingsRequest.pb( + contact_center_insights.GetSettingsRequest() ) transcode.return_value = { "method": "post", @@ -27846,19 +30703,17 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() - ) + req.return_value._content = resources.Settings.to_json(resources.Settings()) - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.GetSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() + post.return_value = resources.Settings() - client.update_phrase_matcher( + client.get_settings( request, metadata=[ ("key", "val"), @@ -27870,9 +30725,8 @@ def 
test_update_phrase_matcher_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_phrase_matcher_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UpdatePhraseMatcherRequest, +def test_get_settings_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetSettingsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27880,11 +30734,7 @@ def test_update_phrase_matcher_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27896,10 +30746,10 @@ def test_update_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_phrase_matcher(request) + client.get_settings(request) -def test_update_phrase_matcher_rest_flattened(): +def test_get_settings_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27908,19 +30758,14 @@ def test_update_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = resources.Settings() # get arguments that satisfy an http rule for this method - sample_request = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } + sample_request = {"name": "projects/sample1/locations/sample2/settings"} # get truthy value for each flattened field mock_args = dict( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -27928,89 +30773,187 @@ def test_update_phrase_matcher_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_phrase_matcher(**mock_args) + client.get_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{phrase_matcher.name=projects/*/locations/*/phraseMatchers/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_update_phrase_matcher_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_get_settings_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", + ) + + +def test_get_settings_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateSettingsRequest, + dict, + ], +) +def test_update_settings_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} + request_init["settings"] = { + "name": "projects/sample1/locations/sample2/settings", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "language_code": "language_code_value", + "conversation_ttl": {"seconds": 751, "nanos": 543}, + "pubsub_notification_settings": {}, + "analysis_config": { + "runtime_integration_analysis_percentage": 0.4167, + "upload_conversation_analysis_percentage": 0.41590000000000005, + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, + }, + }, + "redaction_config": { + "deidentify_template": "deidentify_template_value", + "inspect_template": "inspect_template_value", + }, + "speech_config": {"speech_recognizer": "speech_recognizer_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateSettingsRequest.meta.fields["settings"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_update_phrase_matcher_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["settings"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CalculateStatsRequest, - dict, - ], -) -def test_calculate_stats_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["settings"][field])): + del request_init["settings"][field][i][subfield] + else: + del request_init["settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, + return_value = resources.Settings( + name="name_value", + language_code="language_code_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_stats(request) + response = client.update_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert isinstance(response, resources.Settings) + assert response.name == "name_value" + assert response.language_code == "language_code_value" -def test_calculate_stats_rest_use_cached_wrapped_rpc(): +def test_update_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28024,35 +30967,34 @@ def test_calculate_stats_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.calculate_stats in client._transport._wrapped_methods + assert client._transport.update_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} - client.calculate_stats(request) + client.update_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.calculate_stats(request) + client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_calculate_stats_rest_required_fields( - request_type=contact_center_insights.CalculateStatsRequest, +def test_update_settings_rest_required_fields( + request_type=contact_center_insights.UpdateSettingsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["location"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28063,23 +31005,19 @@ def test_calculate_stats_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_stats._get_unset_required_fields(jsonified_request) + ).update_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["location"] = "location_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_stats._get_unset_required_fields(jsonified_request) + ).update_settings._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter",)) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == "location_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28088,7 +31026,7 @@ def test_calculate_stats_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateStatsResponse() + return_value = resources.Settings() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28100,41 +31038,48 @@ def test_calculate_stats_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb( - return_value - ) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_stats(request) + response = client.update_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_calculate_stats_rest_unset_required_fields(): +def test_update_settings_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.calculate_stats._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter",)) & set(("location",))) + unset_fields = transport.update_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "settings", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_calculate_stats_rest_interceptors(null_interceptor): +def test_update_settings_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28147,14 +31092,14 @@ def test_calculate_stats_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_calculate_stats" + transports.ContactCenterInsightsRestInterceptor, "post_update_settings" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_calculate_stats" + transports.ContactCenterInsightsRestInterceptor, "pre_update_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CalculateStatsRequest.pb( - contact_center_insights.CalculateStatsRequest() + pb_message = contact_center_insights.UpdateSettingsRequest.pb( + contact_center_insights.UpdateSettingsRequest() ) transcode.return_value = { "method": "post", @@ -28166,21 +31111,17 @@ def test_calculate_stats_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.CalculateStatsResponse.to_json( - contact_center_insights.CalculateStatsResponse() - ) - ) + req.return_value._content = resources.Settings.to_json(resources.Settings()) - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.UpdateSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.CalculateStatsResponse() + post.return_value = resources.Settings() - client.calculate_stats( + 
client.update_settings( request, metadata=[ ("key", "val"), @@ -28192,8 +31133,8 @@ def test_calculate_stats_rest_interceptors(null_interceptor): post.assert_called_once() -def test_calculate_stats_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.CalculateStatsRequest +def test_update_settings_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.UpdateSettingsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28201,7 +31142,7 @@ def test_calculate_stats_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28213,10 +31154,10 @@ def test_calculate_stats_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.calculate_stats(request) + client.update_settings(request) -def test_calculate_stats_rest_flattened(): +def test_update_settings_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28225,14 +31166,17 @@ def test_calculate_stats_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateStatsResponse() + return_value = resources.Settings() # get arguments that satisfy an http rule for this method - sample_request = {"location": "projects/sample1/locations/sample2"} + sample_request = { + "settings": {"name": "projects/sample1/locations/sample2/settings"} + } # get truthy value for each flattened field mock_args = dict( - location="location_value", + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -28240,25 +31184,25 @@ def test_calculate_stats_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.calculate_stats(**mock_args) + client.update_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{location=projects/*/locations/*}/conversations:calculateStats" + "%s/v1/{settings.name=projects/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_calculate_stats_rest_flattened_error(transport: str = "rest"): +def test_update_settings_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28267,13 +31211,14 @@ def test_calculate_stats_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_calculate_stats_rest_error(): +def test_update_settings_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28282,46 +31227,46 @@ def test_calculate_stats_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetSettingsRequest, + contact_center_insights.GetEncryptionSpecRequest, dict, ], ) -def test_get_settings_rest(request_type): +def test_get_encryption_spec_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/settings"} + request_init = {"name": "projects/sample1/locations/sample2/encryptionSpec"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Settings( + return_value = resources.EncryptionSpec( name="name_value", - language_code="language_code_value", + kms_key="kms_key_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_settings(request) + response = client.get_encryption_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.EncryptionSpec) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.kms_key == "kms_key_value" -def test_get_settings_rest_use_cached_wrapped_rpc(): +def test_get_encryption_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28335,30 +31280,34 @@ def test_get_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_settings in client._transport._wrapped_methods + assert ( + client._transport.get_encryption_spec in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_encryption_spec + ] = mock_rpc request = {} - client.get_settings(request) + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_settings(request) + client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_settings_rest_required_fields( - request_type=contact_center_insights.GetSettingsRequest, +def test_get_encryption_spec_rest_required_fields( + request_type=contact_center_insights.GetEncryptionSpecRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -28374,7 +31323,7 @@ def test_get_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_settings._get_unset_required_fields(jsonified_request) + ).get_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -28383,7 +31332,7 @@ def test_get_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_settings._get_unset_required_fields(jsonified_request) + ).get_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -28397,7 +31346,7 @@ def test_get_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = resources.EncryptionSpec() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28418,30 +31367,30 @@ def test_get_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_settings(request) + response = client.get_encryption_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_settings_rest_unset_required_fields(): +def test_get_encryption_spec_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_settings._get_unset_required_fields({}) + unset_fields = transport.get_encryption_spec._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_settings_rest_interceptors(null_interceptor): +def test_get_encryption_spec_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28454,14 +31403,14 @@ def test_get_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_settings" + transports.ContactCenterInsightsRestInterceptor, "post_get_encryption_spec" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_settings" + 
transports.ContactCenterInsightsRestInterceptor, "pre_get_encryption_spec" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetSettingsRequest.pb( - contact_center_insights.GetSettingsRequest() + pb_message = contact_center_insights.GetEncryptionSpecRequest.pb( + contact_center_insights.GetEncryptionSpecRequest() ) transcode.return_value = { "method": "post", @@ -28473,17 +31422,19 @@ def test_get_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Settings.to_json(resources.Settings()) + req.return_value._content = resources.EncryptionSpec.to_json( + resources.EncryptionSpec() + ) - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Settings() + post.return_value = resources.EncryptionSpec() - client.get_settings( + client.get_encryption_spec( request, metadata=[ ("key", "val"), @@ -28495,8 +31446,9 @@ def test_get_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_settings_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetSettingsRequest +def test_get_encryption_spec_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.GetEncryptionSpecRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28504,7 +31456,7 @@ def test_get_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/settings"} + request_init = {"name": "projects/sample1/locations/sample2/encryptionSpec"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28516,10 +31468,10 @@ def test_get_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_settings(request) + client.get_encryption_spec(request) -def test_get_settings_rest_flattened(): +def test_get_encryption_spec_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28528,10 +31480,10 @@ def test_get_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Settings() + return_value = resources.EncryptionSpec() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/settings"} + sample_request = {"name": "projects/sample1/locations/sample2/encryptionSpec"} # get truthy value for each flattened field mock_args = dict( @@ -28543,24 +31495,25 @@ def test_get_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_settings(**mock_args) + client.get_encryption_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/settings}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/encryptionSpec}" + % client.transport._host, args[1], ) -def test_get_settings_rest_flattened_error(transport: str = "rest"): +def test_get_encryption_spec_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28569,13 +31522,13 @@ def test_get_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_settings( - contact_center_insights.GetSettingsRequest(), + client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), name="name_value", ) -def test_get_settings_rest_error(): +def test_get_encryption_spec_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28584,146 +31537,41 @@ def test_get_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateSettingsRequest, + contact_center_insights.InitializeEncryptionSpecRequest, dict, ], ) -def test_update_settings_rest(request_type): +def test_initialize_encryption_spec_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} - request_init["settings"] = { - "name": "projects/sample1/locations/sample2/settings", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "language_code": "language_code_value", - "conversation_ttl": {"seconds": 751, "nanos": 543}, - "pubsub_notification_settings": {}, - "analysis_config": { - "runtime_integration_analysis_percentage": 0.4167, - "upload_conversation_analysis_percentage": 0.41590000000000005, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - 
"conversation_profile": "conversation_profile_value", - "summarization_model": 1, - }, - }, - }, - "redaction_config": { - "deidentify_template": "deidentify_template_value", - "inspect_template": "inspect_template_value", - }, - "speech_config": {"speech_recognizer": "speech_recognizer_value"}, + request_init = { + "encryption_spec": {"name": "projects/sample1/locations/sample2/encryptionSpec"} } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateSettingsRequest.meta.fields["settings"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["settings"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["settings"][field])): - del request_init["settings"][field][i][subfield] - else: - del request_init["settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Settings( - name="name_value", - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_settings(request) + response = client.initialize_encryption_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_update_settings_rest_use_cached_wrapped_rpc(): +def test_initialize_encryption_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28737,30 +31585,39 @@ def test_update_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_settings in client._transport._wrapped_methods + assert ( + client._transport.initialize_encryption_spec + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.initialize_encryption_spec + ] = mock_rpc request = {} - client.update_settings(request) + client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_settings(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_settings_rest_required_fields( - request_type=contact_center_insights.UpdateSettingsRequest, +def test_initialize_encryption_spec_rest_required_fields( + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -28775,16 +31632,14 @@ def test_update_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_settings._get_unset_required_fields(jsonified_request) + ).initialize_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_settings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).initialize_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -28796,7 +31651,7 @@ def test_update_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28808,7 +31663,7 @@ def test_update_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -28816,40 +31671,29 @@ def test_update_settings_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_settings(request) + response = client.initialize_encryption_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_settings_rest_unset_required_fields(): +def test_initialize_encryption_spec_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_settings._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "settings", - "updateMask", - ) - ) - ) + unset_fields = transport.initialize_encryption_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("encryptionSpec",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_settings_rest_interceptors(null_interceptor): +def test_initialize_encryption_spec_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28862,14 +31706,18 @@ def test_update_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_settings" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_initialize_encryption_spec", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_settings" + transports.ContactCenterInsightsRestInterceptor, + "pre_initialize_encryption_spec", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateSettingsRequest.pb( - contact_center_insights.UpdateSettingsRequest() + pb_message = contact_center_insights.InitializeEncryptionSpecRequest.pb( + contact_center_insights.InitializeEncryptionSpecRequest() ) transcode.return_value = { "method": "post", @@ -28881,17 +31729,19 @@ def test_update_settings_rest_interceptors(null_interceptor): req.return_value = 
Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Settings.to_json(resources.Settings()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Settings() + post.return_value = operations_pb2.Operation() - client.update_settings( + client.initialize_encryption_spec( request, metadata=[ ("key", "val"), @@ -28903,8 +31753,9 @@ def test_update_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_settings_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.UpdateSettingsRequest +def test_initialize_encryption_spec_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28912,7 +31763,9 @@ def test_update_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} + request_init = { + "encryption_spec": {"name": "projects/sample1/locations/sample2/encryptionSpec"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28924,10 +31777,10 @@ def test_update_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_settings(request) + client.initialize_encryption_spec(request) -def test_update_settings_rest_flattened(): +def test_initialize_encryption_spec_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28936,43 +31789,42 @@ def test_update_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "settings": {"name": "projects/sample1/locations/sample2/settings"} + "encryption_spec": { + "name": "projects/sample1/locations/sample2/encryptionSpec" + } } # get truthy value for each flattened field mock_args = dict( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_settings(**mock_args) + client.initialize_encryption_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{settings.name=projects/*/locations/*/settings}" + "%s/v1/{encryption_spec.name=projects/*/locations/*/encryptionSpec}:initialize" % client.transport._host, args[1], ) -def test_update_settings_rest_flattened_error(transport: str = "rest"): +def test_initialize_encryption_spec_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28981,14 +31833,13 @@ def test_update_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) -def test_update_settings_rest_error(): +def test_initialize_encryption_spec_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -30889,6 +33740,8 @@ def test_contact_center_insights_base_transport(): "delete_issue_model", "deploy_issue_model", "undeploy_issue_model", + "export_issue_model", + "import_issue_model", "get_issue", "list_issues", "update_issue", @@ -30902,6 +33755,8 @@ def test_contact_center_insights_base_transport(): "calculate_stats", "get_settings", "update_settings", + "get_encryption_spec", + "initialize_encryption_spec", "create_view", "get_view", "list_views", @@ -31257,6 +34112,12 @@ def test_contact_center_insights_client_transport_session_collision(transport_na session1 = client1.transport.undeploy_issue_model._session session2 = client2.transport.undeploy_issue_model._session assert session1 != session2 + session1 = client1.transport.export_issue_model._session + session2 = client2.transport.export_issue_model._session + assert session1 != session2 + session1 = client1.transport.import_issue_model._session + session2 = client2.transport.import_issue_model._session + assert session1 != session2 session1 = client1.transport.get_issue._session session2 = client2.transport.get_issue._session assert session1 != session2 @@ -31296,6 +34157,12 @@ def test_contact_center_insights_client_transport_session_collision(transport_na session1 = client1.transport.update_settings._session session2 = client2.transport.update_settings._session assert session1 != session2 + session1 = client1.transport.get_encryption_spec._session + session2 = client2.transport.get_encryption_spec._session + assert session1 != session2 + session1 = client1.transport.initialize_encryption_spec._session + session2 = client2.transport.initialize_encryption_spec._session + assert session1 != session2 session1 = client1.transport.create_view._session session2 = client2.transport.create_view._session assert session1 != session2 @@ -31562,11 +34429,34 @@ def test_parse_conversation_profile_path(): assert expected == actual -def test_issue_path(): +def test_encryption_spec_path(): project = "winkle" location = "nautilus" - issue_model = "scallop" - issue = "abalone" + expected = "projects/{project}/locations/{location}/encryptionSpec".format( + project=project, + location=location, + ) + actual = 
ContactCenterInsightsClient.encryption_spec_path(project, location) + assert expected == actual + + +def test_parse_encryption_spec_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ContactCenterInsightsClient.encryption_spec_path(**expected) + + # Check that the path construction is reversible. + actual = ContactCenterInsightsClient.parse_encryption_spec_path(path) + assert expected == actual + + +def test_issue_path(): + project = "squid" + location = "clam" + issue_model = "whelk" + issue = "octopus" expected = "projects/{project}/locations/{location}/issueModels/{issue_model}/issues/{issue}".format( project=project, location=location, @@ -31581,10 +34471,10 @@ def test_issue_path(): def test_parse_issue_path(): expected = { - "project": "squid", - "location": "clam", - "issue_model": "whelk", - "issue": "octopus", + "project": "oyster", + "location": "nudibranch", + "issue_model": "cuttlefish", + "issue": "mussel", } path = ContactCenterInsightsClient.issue_path(**expected) @@ -31594,9 +34484,9 @@ def test_parse_issue_path(): def test_issue_model_path(): - project = "oyster" - location = "nudibranch" - issue_model = "cuttlefish" + project = "winkle" + location = "nautilus" + issue_model = "scallop" expected = ( "projects/{project}/locations/{location}/issueModels/{issue_model}".format( project=project, @@ -31612,9 +34502,9 @@ def test_issue_model_path(): def test_parse_issue_model_path(): expected = { - "project": "mussel", - "location": "winkle", - "issue_model": "nautilus", + "project": "abalone", + "location": "squid", + "issue_model": "clam", } path = ContactCenterInsightsClient.issue_model_path(**expected) @@ -31624,9 +34514,9 @@ def test_parse_issue_model_path(): def test_participant_path(): - project = "scallop" - conversation = "abalone" - participant = "squid" + project = "whelk" + conversation = "octopus" + participant = "oyster" expected = "projects/{project}/conversations/{conversation}/participants/{participant}".format( project=project, conversation=conversation, @@ -31640,9 +34530,9 @@ def test_participant_path(): def test_parse_participant_path(): expected = { - "project": "clam", - "conversation": "whelk", - "participant": "octopus", + "project": "nudibranch", + "conversation": "cuttlefish", + "participant": "mussel", } path = ContactCenterInsightsClient.participant_path(**expected) @@ -31652,9 +34542,9 @@ def test_parse_participant_path(): def test_phrase_matcher_path(): - project = "oyster" - location = "nudibranch" - phrase_matcher = "cuttlefish" + project = "winkle" + location = "nautilus" + phrase_matcher = "scallop" expected = "projects/{project}/locations/{location}/phraseMatchers/{phrase_matcher}".format( project=project, location=location, @@ -31668,9 +34558,9 @@ def test_phrase_matcher_path(): def test_parse_phrase_matcher_path(): expected = { - "project": "mussel", - "location": "winkle", - "phrase_matcher": "nautilus", + "project": "abalone", + "location": "squid", + "phrase_matcher": "clam", } path = ContactCenterInsightsClient.phrase_matcher_path(**expected) @@ -31680,9 +34570,9 @@ def test_parse_phrase_matcher_path(): def test_recognizer_path(): - project = "scallop" - location = "abalone" - recognizer = "squid" + project = "whelk" + location = "octopus" + recognizer = "oyster" expected = ( "projects/{project}/locations/{location}/recognizers/{recognizer}".format( project=project, @@ -31696,9 +34586,9 @@ def test_recognizer_path(): def test_parse_recognizer_path(): expected = { - "project": "clam", - "location": 
"whelk", - "recognizer": "octopus", + "project": "nudibranch", + "location": "cuttlefish", + "recognizer": "mussel", } path = ContactCenterInsightsClient.recognizer_path(**expected) @@ -31708,8 +34598,8 @@ def test_parse_recognizer_path(): def test_settings_path(): - project = "oyster" - location = "nudibranch" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}/settings".format( project=project, location=location, @@ -31720,8 +34610,8 @@ def test_settings_path(): def test_parse_settings_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "scallop", + "location": "abalone", } path = ContactCenterInsightsClient.settings_path(**expected) @@ -31731,9 +34621,9 @@ def test_parse_settings_path(): def test_view_path(): - project = "winkle" - location = "nautilus" - view = "scallop" + project = "squid" + location = "clam" + view = "whelk" expected = "projects/{project}/locations/{location}/views/{view}".format( project=project, location=location, @@ -31745,9 +34635,9 @@ def test_view_path(): def test_parse_view_path(): expected = { - "project": "abalone", - "location": "squid", - "view": "clam", + "project": "octopus", + "location": "oyster", + "view": "nudibranch", } path = ContactCenterInsightsClient.view_path(**expected) @@ -31757,7 +34647,7 @@ def test_parse_view_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -31767,7 +34657,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "mussel", } path = ContactCenterInsightsClient.common_billing_account_path(**expected) @@ -31777,7 +34667,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -31787,7 +34677,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "nautilus", } path = ContactCenterInsightsClient.common_folder_path(**expected) @@ -31797,7 +34687,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -31807,7 +34697,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "abalone", } path = ContactCenterInsightsClient.common_organization_path(**expected) @@ -31817,7 +34707,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -31827,7 +34717,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "clam", } path = ContactCenterInsightsClient.common_project_path(**expected) @@ -31837,8 +34727,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -31849,8 +34739,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - 
"location": "clam", + "project": "oyster", + "location": "nudibranch", } path = ContactCenterInsightsClient.common_location_path(**expected) From c859d14990dbdf2c59a09265b1c91479f134aaa6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 11:16:35 -0400 Subject: [PATCH 105/108] feat: [google-shopping-merchant-datasources] adding some more information about supplemental data sources (#13108) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat: adding some more information about supplemental data sources feat: Add FileUploads service docs: A comment for enum value `PRODUCTS` in enum `Channel` is changed END_COMMIT_OVERRIDE PiperOrigin-RevId: 681900944 Source-Link: https://github.com/googleapis/googleapis/commit/78d1fb208e4af3022a0aaf27bafa578ff326326e Source-Link: https://github.com/googleapis/googleapis-gen/commit/19badfac8741859ca06f191977455b370e84e14b Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWRhdGFzb3VyY2VzLy5Pd2xCb3QueWFtbCIsImgiOiIxOWJhZGZhYzg3NDE4NTljYTA2ZjE5MTk3NzQ1NWIzNzBlODRlMTRiIn0= BEGIN_NESTED_COMMIT feat: [google-shopping-merchant-datasources] Add FileUploads service docs: A comment for enum value `PRODUCTS` in enum `Channel` is changed PiperOrigin-RevId: 678641097 Source-Link: https://github.com/googleapis/googleapis/commit/9c4c174205d923f9490d534c6e54c2d18ddc9d8f Source-Link: https://github.com/googleapis/googleapis-gen/commit/13cfe504a6f0aedc0416c23851f278157834f905 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXNob3BwaW5nLW1lcmNoYW50LWRhdGFzb3VyY2VzLy5Pd2xCb3QueWFtbCIsImgiOiIxM2NmZTUwNGE2ZjBhZWRjMDQxNmMyMzg1MWYyNzgxNTc4MzRmOTA1In0= END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: Anthonios Partheniou --- .../file_uploads_service.rst | 6 + .../merchant_datasources_v1beta/services_.rst | 1 + .../shopping/merchant_datasources/__init__.py | 16 + .../merchant_datasources_v1beta/__init__.py | 11 + .../gapic_metadata.json | 34 + .../services/file_uploads_service/__init__.py | 22 + .../file_uploads_service/async_client.py | 389 +++ .../services/file_uploads_service/client.py | 815 +++++ .../transports/__init__.py | 38 + .../file_uploads_service/transports/base.py | 161 + .../file_uploads_service/transports/grpc.py | 272 ++ .../transports/grpc_asyncio.py | 284 ++ .../file_uploads_service/transports/rest.py | 309 ++ .../types/__init__.py | 5 + .../types/datasourcetypes.py | 118 +- .../types/fileuploads.py | 205 ++ ...e_uploads_service_get_file_upload_async.py | 52 + ...le_uploads_service_get_file_upload_sync.py | 52 + ....shopping.merchant.datasources.v1beta.json | 161 + ...up_merchant_datasources_v1beta_keywords.py | 1 + .../test_data_sources_service.py | 20 + .../test_file_uploads_service.py | 2632 +++++++++++++++++ 22 files changed, 5603 insertions(+), 1 deletion(-) create mode 100644 packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py create mode 100644 
packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py create mode 100644 packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py create mode 100644 packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py create mode 100644 packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py create mode 100644 packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py diff --git a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst new file mode 100644 index 000000000000..2cb7cfc2bd21 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/file_uploads_service.rst @@ -0,0 +1,6 @@ +FileUploadsService +------------------------------------ + +.. 
automodule:: google.shopping.merchant_datasources_v1beta.services.file_uploads_service + :members: + :inherited-members: diff --git a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst index e81d9b15d547..c306312a77f1 100644 --- a/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst +++ b/packages/google-shopping-merchant-datasources/docs/merchant_datasources_v1beta/services_.rst @@ -4,3 +4,4 @@ Services for Google Shopping Merchant Datasources v1beta API :maxdepth: 2 data_sources_service + file_uploads_service diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py index 5c29874728d6..d96be2f6dc0b 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources/__init__.py @@ -24,6 +24,12 @@ from google.shopping.merchant_datasources_v1beta.services.data_sources_service.client import ( DataSourcesServiceClient, ) +from google.shopping.merchant_datasources_v1beta.services.file_uploads_service.async_client import ( + FileUploadsServiceAsyncClient, +) +from google.shopping.merchant_datasources_v1beta.services.file_uploads_service.client import ( + FileUploadsServiceClient, +) from google.shopping.merchant_datasources_v1beta.types.datasources import ( CreateDataSourceRequest, DataSource, @@ -35,6 +41,7 @@ UpdateDataSourceRequest, ) from google.shopping.merchant_datasources_v1beta.types.datasourcetypes import ( + DataSourceReference, LocalInventoryDataSource, PrimaryProductDataSource, PromotionDataSource, @@ -42,10 +49,16 @@ SupplementalProductDataSource, ) from google.shopping.merchant_datasources_v1beta.types.fileinputs import FileInput +from google.shopping.merchant_datasources_v1beta.types.fileuploads import ( + FileUpload, + GetFileUploadRequest, +) __all__ = ( "DataSourcesServiceClient", "DataSourcesServiceAsyncClient", + "FileUploadsServiceClient", + "FileUploadsServiceAsyncClient", "CreateDataSourceRequest", "DataSource", "DeleteDataSourceRequest", @@ -54,10 +67,13 @@ "ListDataSourcesRequest", "ListDataSourcesResponse", "UpdateDataSourceRequest", + "DataSourceReference", "LocalInventoryDataSource", "PrimaryProductDataSource", "PromotionDataSource", "RegionalInventoryDataSource", "SupplementalProductDataSource", "FileInput", + "FileUpload", + "GetFileUploadRequest", ) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py index 7819b598ee4a..5a8e3cd79467 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/__init__.py @@ -22,6 +22,10 @@ DataSourcesServiceAsyncClient, DataSourcesServiceClient, ) +from .services.file_uploads_service import ( + FileUploadsServiceAsyncClient, + FileUploadsServiceClient, +) from .types.datasources import ( CreateDataSourceRequest, DataSource, @@ -33,6 +37,7 @@ UpdateDataSourceRequest, ) from .types.datasourcetypes import ( + DataSourceReference, LocalInventoryDataSource, 
PrimaryProductDataSource, PromotionDataSource, @@ -40,16 +45,22 @@ SupplementalProductDataSource, ) from .types.fileinputs import FileInput +from .types.fileuploads import FileUpload, GetFileUploadRequest __all__ = ( "DataSourcesServiceAsyncClient", + "FileUploadsServiceAsyncClient", "CreateDataSourceRequest", "DataSource", + "DataSourceReference", "DataSourcesServiceClient", "DeleteDataSourceRequest", "FetchDataSourceRequest", "FileInput", + "FileUpload", + "FileUploadsServiceClient", "GetDataSourceRequest", + "GetFileUploadRequest", "ListDataSourcesRequest", "ListDataSourcesResponse", "LocalInventoryDataSource", diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json index 22bf4c71ef65..ec4728128d9c 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/gapic_metadata.json @@ -113,6 +113,40 @@ } } } + }, + "FileUploadsService": { + "clients": { + "grpc": { + "libraryClient": "FileUploadsServiceClient", + "rpcs": { + "GetFileUpload": { + "methods": [ + "get_file_upload" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FileUploadsServiceAsyncClient", + "rpcs": { + "GetFileUpload": { + "methods": [ + "get_file_upload" + ] + } + } + }, + "rest": { + "libraryClient": "FileUploadsServiceClient", + "rpcs": { + "GetFileUpload": { + "methods": [ + "get_file_upload" + ] + } + } + } + } } } } diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py new file mode 100644 index 000000000000..0adf352dc7ad --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import FileUploadsServiceAsyncClient +from .client import FileUploadsServiceClient + +__all__ = ( + "FileUploadsServiceClient", + "FileUploadsServiceAsyncClient", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py new file mode 100644 index 000000000000..909683366907 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/async_client.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .client import FileUploadsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport +from .transports.grpc_asyncio import FileUploadsServiceGrpcAsyncIOTransport + + +class FileUploadsServiceAsyncClient: + """Service to manage data source file uploads.""" + + _client: FileUploadsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = FileUploadsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = FileUploadsServiceClient._DEFAULT_UNIVERSE + + file_upload_path = staticmethod(FileUploadsServiceClient.file_upload_path) + parse_file_upload_path = staticmethod( + FileUploadsServiceClient.parse_file_upload_path + ) + common_billing_account_path = staticmethod( + FileUploadsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + FileUploadsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(FileUploadsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + FileUploadsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + FileUploadsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + FileUploadsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(FileUploadsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + FileUploadsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(FileUploadsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + FileUploadsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceAsyncClient: The constructed client. + """ + return FileUploadsServiceClient.from_service_account_info.__func__(FileUploadsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceAsyncClient: The constructed client. + """ + return FileUploadsServiceClient.from_service_account_file.__func__(FileUploadsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FileUploadsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FileUploadsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FileUploadsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = FileUploadsServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + FileUploadsServiceTransport, + Callable[..., FileUploadsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the file uploads service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,FileUploadsServiceTransport,Callable[..., FileUploadsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FileUploadsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FileUploadsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_file_upload( + self, + request: Optional[Union[fileuploads.GetFileUploadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> fileuploads.FileUpload: + r"""Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + async def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file_upload(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest, dict]]): + The request object. Request message for the + GetFileUploadRequest method. + name (:class:`str`): + Required. The name of the data source file upload to + retrieve. Format: + ``accounts/{account}/dataSources/{datasource}/fileUploads/latest`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.merchant_datasources_v1beta.types.FileUpload: + The file upload of a specific data + source, that is, the result of the + retrieval of the data source at a + certain timestamp computed + asynchronously when the data source + processing is finished. Only applicable + to file data sources. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, fileuploads.GetFileUploadRequest): + request = fileuploads.GetFileUploadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_file_upload + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "FileUploadsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FileUploadsServiceAsyncClient",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py new file mode 100644 index 000000000000..45141cbec8cf --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/client.py @@ -0,0 +1,815 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Callable,
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.shopping.merchant_datasources_v1beta import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.protobuf import timestamp_pb2  # type: ignore
+
+from google.shopping.merchant_datasources_v1beta.types import fileuploads
+
+from .transports.base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport
+from .transports.grpc import FileUploadsServiceGrpcTransport
+from .transports.grpc_asyncio import FileUploadsServiceGrpcAsyncIOTransport
+from .transports.rest import FileUploadsServiceRestTransport
+
+
+class FileUploadsServiceClientMeta(type):
+    """Metaclass for the FileUploadsService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[FileUploadsServiceTransport]]
+    _transport_registry["grpc"] = FileUploadsServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = FileUploadsServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = FileUploadsServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[FileUploadsServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class FileUploadsServiceClient(metaclass=FileUploadsServiceClientMeta):
+    """Service to manage data source file uploads."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "merchantapi.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "merchantapi.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileUploadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FileUploadsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FileUploadsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def file_upload_path( + account: str, + datasource: str, + fileupload: str, + ) -> str: + """Returns a fully-qualified file_upload string.""" + return "accounts/{account}/dataSources/{datasource}/fileUploads/{fileupload}".format( + account=account, + datasource=datasource, + fileupload=fileupload, + ) + + @staticmethod + def parse_file_upload_path(path: str) -> Dict[str, str]: + """Parses a file_upload path into its component segments.""" + m = re.match( + r"^accounts/(?P.+?)/dataSources/(?P.+?)/fileUploads/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+                GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = os.getenv(
+            "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+        ).lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = FileUploadsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. 
+ """ + + default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or FileUploadsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + FileUploadsServiceTransport, + Callable[..., FileUploadsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the file uploads service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,FileUploadsServiceTransport,Callable[..., FileUploadsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FileUploadsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = FileUploadsServiceClient._read_environment_variables() + self._client_cert_source = FileUploadsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = FileUploadsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, FileUploadsServiceTransport) + if transport_provided: + # transport is a FileUploadsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(FileUploadsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or FileUploadsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[FileUploadsServiceTransport], + Callable[..., FileUploadsServiceTransport], + ] = ( + FileUploadsServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., FileUploadsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_file_upload( + self, + request: Optional[Union[fileuploads.GetFileUploadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> fileuploads.FileUpload: + r"""Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import merchant_datasources_v1beta + + def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = client.get_file_upload(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest, dict]): + The request object. Request message for the + GetFileUploadRequest method. + name (str): + Required. The name of the data source file upload to + retrieve. Format: + ``accounts/{account}/dataSources/{datasource}/fileUploads/latest`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.merchant_datasources_v1beta.types.FileUpload: + The file upload of a specific data + source, that is, the result of the + retrieval of the data source at a + certain timestamp computed + asynchronously when the data source + processing is finished. Only applicable + to file data sources. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, fileuploads.GetFileUploadRequest): + request = fileuploads.GetFileUploadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_file_upload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "FileUploadsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FileUploadsServiceClient",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py new file mode 100644 index 000000000000..c3db09f22723 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
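A minimal sketch of the synchronous client defined above, assuming application default credentials; it selects the REST transport by its registry label ("rest", alongside "grpc" and "grpc_asyncio") and builds the resource name with the generated path helper. The account and data source IDs are placeholders.

from google.shopping import merchant_datasources_v1beta

# "rest" resolves to FileUploadsServiceRestTransport via the transport registry;
# omitting ``transport`` picks the first registered transport (gRPC).
client = merchant_datasources_v1beta.FileUploadsServiceClient(transport="rest")

# Build "accounts/123/dataSources/456/fileUploads/latest" with the path helper.
name = merchant_datasources_v1beta.FileUploadsServiceClient.file_upload_path(
    account="123", datasource="456", fileupload="latest"
)
request = merchant_datasources_v1beta.GetFileUploadRequest(name=name)
file_upload = client.get_file_upload(request=request)
print(file_upload)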
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FileUploadsServiceTransport +from .grpc import FileUploadsServiceGrpcTransport +from .grpc_asyncio import FileUploadsServiceGrpcAsyncIOTransport +from .rest import FileUploadsServiceRestInterceptor, FileUploadsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[FileUploadsServiceTransport]] +_transport_registry["grpc"] = FileUploadsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = FileUploadsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = FileUploadsServiceRestTransport + +__all__ = ( + "FileUploadsServiceTransport", + "FileUploadsServiceGrpcTransport", + "FileUploadsServiceGrpcAsyncIOTransport", + "FileUploadsServiceRestTransport", + "FileUploadsServiceRestInterceptor", +) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py new file mode 100644 index 000000000000..a55aee7a66de --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/base.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.merchant_datasources_v1beta import gapic_version as package_version +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class FileUploadsServiceTransport(abc.ABC): + """Abstract transport class for FileUploadsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/content",) + + DEFAULT_HOST: str = "merchantapi.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_file_upload: gapic_v1.method.wrap_method( + self.get_file_upload, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_file_upload( + self, + ) -> Callable[ + [fileuploads.GetFileUploadRequest], + Union[fileuploads.FileUpload, Awaitable[fileuploads.FileUpload]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("FileUploadsServiceTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py new file mode 100644 index 000000000000..7bbea5efafca --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc.py @@ -0,0 +1,272 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport + + +class FileUploadsServiceGrpcTransport(FileUploadsServiceTransport): + """gRPC backend transport for FileUploadsService. + + Service to manage data source file uploads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_file_upload( + self, + ) -> Callable[[fileuploads.GetFileUploadRequest], fileuploads.FileUpload]: + r"""Return a callable for the get file upload method over gRPC. 
+ + Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + Returns: + Callable[[~.GetFileUploadRequest], + ~.FileUpload]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_file_upload" not in self._stubs: + self._stubs["get_file_upload"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.FileUploadsService/GetFileUpload", + request_serializer=fileuploads.GetFileUploadRequest.serialize, + response_deserializer=fileuploads.FileUpload.deserialize, + ) + return self._stubs["get_file_upload"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("FileUploadsServiceGrpcTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..fc8a254fac63 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/grpc_asyncio.py @@ -0,0 +1,284 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .base import DEFAULT_CLIENT_INFO, FileUploadsServiceTransport +from .grpc import FileUploadsServiceGrpcTransport + + +class FileUploadsServiceGrpcAsyncIOTransport(FileUploadsServiceTransport): + """gRPC AsyncIO backend transport for FileUploadsService. + + Service to manage data source file uploads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_file_upload( + self, + ) -> Callable[ + [fileuploads.GetFileUploadRequest], Awaitable[fileuploads.FileUpload] + ]: + r"""Return a callable for the get file upload method over gRPC. + + Gets the latest data source file upload. Only the ``latest`` + alias is accepted for a file upload. + + Returns: + Callable[[~.GetFileUploadRequest], + Awaitable[~.FileUpload]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_file_upload" not in self._stubs: + self._stubs["get_file_upload"] = self.grpc_channel.unary_unary( + "/google.shopping.merchant.datasources.v1beta.FileUploadsService/GetFileUpload", + request_serializer=fileuploads.GetFileUploadRequest.serialize, + response_deserializer=fileuploads.FileUpload.deserialize, + ) + return self._stubs["get_file_upload"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_file_upload: gapic_v1.method_async.wrap_method( + self.get_file_upload, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("FileUploadsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py new file mode 100644 index 000000000000..1647f7ff13a9 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/services/file_uploads_service/transports/rest.py @@ -0,0 +1,309 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.shopping.merchant_datasources_v1beta.types import fileuploads + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import FileUploadsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FileUploadsServiceRestInterceptor: + """Interceptor for FileUploadsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FileUploadsServiceRestTransport. + + .. code-block:: python + class MyCustomFileUploadsServiceInterceptor(FileUploadsServiceRestInterceptor): + def pre_get_file_upload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_file_upload(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FileUploadsServiceRestTransport(interceptor=MyCustomFileUploadsServiceInterceptor()) + client = FileUploadsServiceClient(transport=transport) + + + """ + + def pre_get_file_upload( + self, + request: fileuploads.GetFileUploadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[fileuploads.GetFileUploadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_file_upload + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileUploadsService server. + """ + return request, metadata + + def post_get_file_upload( + self, response: fileuploads.FileUpload + ) -> fileuploads.FileUpload: + """Post-rpc interceptor for get_file_upload + + Override in a subclass to manipulate the response + after it is returned by the FileUploadsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FileUploadsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FileUploadsServiceRestInterceptor + + +class FileUploadsServiceRestTransport(FileUploadsServiceTransport): + """REST backend transport for FileUploadsService. + + Service to manage data source file uploads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "merchantapi.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[FileUploadsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'merchantapi.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FileUploadsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetFileUpload(FileUploadsServiceRestStub): + def __hash__(self): + return hash("GetFileUpload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: fileuploads.GetFileUploadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> fileuploads.FileUpload: + r"""Call the get file upload method over HTTP. + + Args: + request (~.fileuploads.GetFileUploadRequest): + The request object. Request message for the + GetFileUploadRequest method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.fileuploads.FileUpload: + The file upload of a specific data + source, that is, the result of the + retrieval of the data source at a + certain timestamp computed + asynchronously when the data source + processing is finished. Only applicable + to file data sources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/datasources/v1beta/{name=accounts/*/dataSources/*/fileUploads/*}", + }, + ] + request, metadata = self._interceptor.pre_get_file_upload(request, metadata) + pb_request = fileuploads.GetFileUploadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = fileuploads.FileUpload() + pb_resp = fileuploads.FileUpload.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_file_upload(resp) + return resp + + @property + def get_file_upload( + self, + ) -> Callable[[fileuploads.GetFileUploadRequest], fileuploads.FileUpload]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFileUpload(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("FileUploadsServiceRestTransport",) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py index 495c5a32635f..22df9907872e 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/__init__.py @@ -24,6 +24,7 @@ UpdateDataSourceRequest, ) from .datasourcetypes import ( + DataSourceReference, LocalInventoryDataSource, PrimaryProductDataSource, PromotionDataSource, @@ -31,6 +32,7 @@ SupplementalProductDataSource, ) from .fileinputs import FileInput +from .fileuploads import FileUpload, GetFileUploadRequest __all__ = ( "CreateDataSourceRequest", @@ -41,10 +43,13 @@ "ListDataSourcesRequest", "ListDataSourcesResponse", "UpdateDataSourceRequest", + "DataSourceReference", "LocalInventoryDataSource", "PrimaryProductDataSource", "PromotionDataSource", "RegionalInventoryDataSource", "SupplementalProductDataSource", "FileInput", + "FileUpload", + "GetFileUploadRequest", ) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py index 61ec51caa2ca..ca1671d2461a 100644 --- 
a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/datasourcetypes.py @@ -27,6 +27,7 @@ "LocalInventoryDataSource", "RegionalInventoryDataSource", "PromotionDataSource", + "DataSourceReference", }, ) @@ -76,6 +77,10 @@ class PrimaryProductDataSource(proto.Message): Optional. The countries where the items may be displayed. Represented as a `CLDR territory code `__. + default_rule (google.shopping.merchant_datasources_v1beta.types.PrimaryProductDataSource.DefaultRule): + Optional. Default rule management of the data + source. If set, the linked data sources will be + replaced. """ class Channel(proto.Enum): @@ -93,13 +98,53 @@ class Channel(proto.Enum): Local product. PRODUCTS (3): Unified data source for both local and online - products. + products. Note: Products management through the + API is not possible for this channel. """ CHANNEL_UNSPECIFIED = 0 ONLINE_PRODUCTS = 1 LOCAL_PRODUCTS = 2 PRODUCTS = 3 + class DefaultRule(proto.Message): + r"""Default rule management of the data source. + + Attributes: + take_from_data_sources (MutableSequence[google.shopping.merchant_datasources_v1beta.types.DataSourceReference]): + Required. The list of data sources linked in the `default + rule `__. + This list is ordered by the default rule priority of joining + the data. It might include none or multiple references to + ``self`` and supplemental data sources. + + The list must not be empty. + + To link the data source to the default rule, you need to add + a new reference to this list (in sequential order). + + To unlink the data source from the default rule, you need to + remove the given reference from this list. To create + attribute rules that are different from the default rule, + see `Set up your attribute + rules `__. + + Changing the order of this list will result in changing the + priority of data sources in the default rule. + + For example, providing the following list: [``1001``, + ``self``] will take attribute values from supplemental data + source ``1001``, and fallback to ``self`` if the attribute + is not set in ``1001``. + """ + + take_from_data_sources: MutableSequence[ + "DataSourceReference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSourceReference", + ) + channel: Channel = proto.Field( proto.ENUM, number=3, @@ -119,10 +164,22 @@ class Channel(proto.Enum): proto.STRING, number=6, ) + default_rule: DefaultRule = proto.Field( + proto.MESSAGE, + number=7, + message=DefaultRule, + ) class SupplementalProductDataSource(proto.Message): r"""The supplemental data source for local and online products. + Supplemental API data sources must not have ``feedLabel`` and + ``contentLanguage`` fields set. You can only use supplemental data + sources to update existing products. For information about creating + a supplemental data source, see `Create a supplemental data source + and link it to the primary data + source `__. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -159,6 +216,12 @@ class SupplementalProductDataSource(proto.Message): produts without that restriction. This field is a member of `oneof`_ ``_content_language``. + referencing_primary_data_sources (MutableSequence[google.shopping.merchant_datasources_v1beta.types.DataSourceReference]): + Output only. 
The (unordered and deduplicated) + list of all primary data sources linked to this + data source in either default or custom rules. + Supplemental data source cannot be deleted + before all links are removed. """ feed_label: str = proto.Field( @@ -171,6 +234,13 @@ class SupplementalProductDataSource(proto.Message): number=5, optional=True, ) + referencing_primary_data_sources: MutableSequence[ + "DataSourceReference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="DataSourceReference", + ) class LocalInventoryDataSource(proto.Message): @@ -257,4 +327,50 @@ class PromotionDataSource(proto.Message): ) +class DataSourceReference(proto.Message): + r"""Data source reference can be used to manage related data + sources within the data source service. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + self_ (bool): + Self should be used to reference the primary + data source itself. + + This field is a member of `oneof`_ ``data_source_id``. + primary_data_source_name (str): + Optional. The name of the primary data source. Format: + ``accounts/{account}/dataSources/{datasource}`` + + This field is a member of `oneof`_ ``data_source_id``. + supplemental_data_source_name (str): + Optional. The name of the supplemental data source. Format: + ``accounts/{account}/dataSources/{datasource}`` + + This field is a member of `oneof`_ ``data_source_id``. + """ + + self_: bool = proto.Field( + proto.BOOL, + number=1, + oneof="data_source_id", + ) + primary_data_source_name: str = proto.Field( + proto.STRING, + number=3, + oneof="data_source_id", + ) + supplemental_data_source_name: str = proto.Field( + proto.STRING, + number=2, + oneof="data_source_id", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py new file mode 100644 index 000000000000..c2369af53cb5 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1beta/types/fileuploads.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.shopping.merchant.datasources.v1beta", + manifest={ + "FileUpload", + "GetFileUploadRequest", + }, +) + + +class FileUpload(proto.Message): + r"""The file upload of a specific data source, that is, the + result of the retrieval of the data source at a certain + timestamp computed asynchronously when the data source + processing is finished. Only applicable to file data sources. + + Attributes: + name (str): + Identifier. The name of the data source file upload. Format: + ``{datasource.name=accounts/{account}/dataSources/{datasource}/fileUploads/{fileupload}}`` + data_source_id (int): + Output only. The data source id. + processing_state (google.shopping.merchant_datasources_v1beta.types.FileUpload.ProcessingState): + Output only. The processing state of the data + source. + issues (MutableSequence[google.shopping.merchant_datasources_v1beta.types.FileUpload.Issue]): + Output only. The list of issues occurring in + the data source. + items_total (int): + Output only. The number of items in the data + source that were processed. + items_created (int): + Output only. The number of items in the data + source that were created. + items_updated (int): + Output only. The number of items in the data + source that were updated. + upload_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date at which the file of + the data source was uploaded. + """ + + class ProcessingState(proto.Enum): + r"""The processing state of the data source. + + Values: + PROCESSING_STATE_UNSPECIFIED (0): + Processing state unspecified. + FAILED (1): + The data source could not be processed or all + the items had errors. + IN_PROGRESS (2): + The data source is being processed. + SUCCEEDED (3): + The data source was processed successfully, + though some items might have had errors. + """ + PROCESSING_STATE_UNSPECIFIED = 0 + FAILED = 1 + IN_PROGRESS = 2 + SUCCEEDED = 3 + + class Issue(proto.Message): + r"""An error occurring in the data source, like "invalid price". + + Attributes: + title (str): + Output only. The title of the issue, for + example, "Item too big". + description (str): + Output only. The error description, for + example, "Your data source contains items which + have too many attributes, or are too big. These + items will be dropped". + code (str): + Output only. The code of the error, for example, + "validation/invalid_value". Returns "?" if the code is + unknown. + count (int): + Output only. The number of occurrences of the + error in the file upload. + severity (google.shopping.merchant_datasources_v1beta.types.FileUpload.Issue.Severity): + Output only. The severity of the issue. + documentation_uri (str): + Output only. Link to the documentation + explaining the issue in more details, if + available. + """ + + class Severity(proto.Enum): + r"""The severity of the issue. + + Values: + SEVERITY_UNSPECIFIED (0): + Severity unspecified. + WARNING (1): + The issue is the warning. + ERROR (2): + The issue is an error. 
+ """ + SEVERITY_UNSPECIFIED = 0 + WARNING = 1 + ERROR = 2 + + title: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + code: str = proto.Field( + proto.STRING, + number=3, + ) + count: int = proto.Field( + proto.INT64, + number=4, + ) + severity: "FileUpload.Issue.Severity" = proto.Field( + proto.ENUM, + number=5, + enum="FileUpload.Issue.Severity", + ) + documentation_uri: str = proto.Field( + proto.STRING, + number=6, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_id: int = proto.Field( + proto.INT64, + number=2, + ) + processing_state: ProcessingState = proto.Field( + proto.ENUM, + number=3, + enum=ProcessingState, + ) + issues: MutableSequence[Issue] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=Issue, + ) + items_total: int = proto.Field( + proto.INT64, + number=5, + ) + items_created: int = proto.Field( + proto.INT64, + number=6, + ) + items_updated: int = proto.Field( + proto.INT64, + number=7, + ) + upload_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + + +class GetFileUploadRequest(proto.Message): + r"""Request message for the GetFileUploadRequest method. + + Attributes: + name (str): + Required. The name of the data source file upload to + retrieve. Format: + ``accounts/{account}/dataSources/{datasource}/fileUploads/latest`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py new file mode 100644 index 000000000000..69eed065c6f5 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFileUpload +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +async def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceAsyncClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file_upload(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py new file mode 100644 index 000000000000..8ca612c4e3bd --- /dev/null +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFileUpload +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-shopping-merchant-datasources + + +# [START merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.shopping import merchant_datasources_v1beta + + +def sample_get_file_upload(): + # Create a client + client = merchant_datasources_v1beta.FileUploadsServiceClient() + + # Initialize request argument(s) + request = merchant_datasources_v1beta.GetFileUploadRequest( + name="name_value", + ) + + # Make the request + response = client.get_file_upload(request=request) + + # Handle the response + print(response) + +# [END merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json index ee381d03839d..8af0e5a52d60 100644 --- a/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/snippet_metadata_google.shopping.merchant.datasources.v1beta.json @@ -972,6 +972,167 @@ } ], "title": "merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.FileUploadsServiceAsyncClient", + "shortName": "FileUploadsServiceAsyncClient" + }, + "fullName": "google.shopping.merchant_datasources_v1beta.FileUploadsServiceAsyncClient.get_file_upload", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService.GetFileUpload", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService", + "shortName": "FileUploadsService" + }, + "shortName": "GetFileUpload" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.FileUpload", + "shortName": "get_file_upload" + }, + "description": "Sample for GetFileUpload", + "file": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.shopping.merchant_datasources_v1beta.FileUploadsServiceClient", + "shortName": "FileUploadsServiceClient" + }, + "fullName": 
"google.shopping.merchant_datasources_v1beta.FileUploadsServiceClient.get_file_upload", + "method": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService.GetFileUpload", + "service": { + "fullName": "google.shopping.merchant.datasources.v1beta.FileUploadsService", + "shortName": "FileUploadsService" + }, + "shortName": "GetFileUpload" + }, + "parameters": [ + { + "name": "request", + "type": "google.shopping.merchant_datasources_v1beta.types.GetFileUploadRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.shopping.merchant_datasources_v1beta.types.FileUpload", + "shortName": "get_file_upload" + }, + "description": "Sample for GetFileUpload", + "file": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py" } ] } diff --git a/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py b/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py index 77f6b0db701f..74286e5cd17b 100644 --- a/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py +++ b/packages/google-shopping-merchant-datasources/scripts/fixup_merchant_datasources_v1beta_keywords.py @@ -43,6 +43,7 @@ class merchant_datasourcesCallTransformer(cst.CSTTransformer): 'delete_data_source': ('name', ), 'fetch_data_source': ('name', ), 'get_data_source': ('name', ), + 'get_file_upload': ('name', ), 'list_data_sources': ('parent', 'page_size', 'page_token', ), 'update_data_source': ('data_source', 'update_mask', ), } diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py index 9bd09642ee77..5ef814bac36e 100644 --- a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_data_sources_service.py @@ -4366,10 +4366,20 @@ def test_create_data_source_rest(request_type): "feed_label": "feed_label_value", "content_language": "content_language_value", "countries": ["countries_value1", "countries_value2"], + "default_rule": { + "take_from_data_sources": [ + { + "self_": True, + "primary_data_source_name": "primary_data_source_name_value", + "supplemental_data_source_name": "supplemental_data_source_name_value", + } + ] + }, }, "supplemental_product_data_source": { "feed_label": "feed_label_value", "content_language": "content_language_value", + "referencing_primary_data_sources": 
{}, }, "local_inventory_data_source": { "feed_label": "feed_label_value", @@ -4813,10 +4823,20 @@ def test_update_data_source_rest(request_type): "feed_label": "feed_label_value", "content_language": "content_language_value", "countries": ["countries_value1", "countries_value2"], + "default_rule": { + "take_from_data_sources": [ + { + "self_": True, + "primary_data_source_name": "primary_data_source_name_value", + "supplemental_data_source_name": "supplemental_data_source_name_value", + } + ] + }, }, "supplemental_product_data_source": { "feed_label": "feed_label_value", "content_language": "content_language_value", + "referencing_primary_data_sources": {}, }, "local_inventory_data_source": { "feed_label": "feed_label_value", diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py new file mode 100644 index 000000000000..7c1b989a8758 --- /dev/null +++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1beta/test_file_uploads_service.py @@ -0,0 +1,2632 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.shopping.merchant_datasources_v1beta.services.file_uploads_service import ( + FileUploadsServiceAsyncClient, + FileUploadsServiceClient, + transports, +) +from google.shopping.merchant_datasources_v1beta.types import fileuploads + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FileUploadsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + FileUploadsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + FileUploadsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + FileUploadsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert FileUploadsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert 
FileUploadsServiceClient._get_client_cert_source(None, False) is None + assert ( + FileUploadsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + FileUploadsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + FileUploadsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + FileUploadsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + default_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + FileUploadsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileUploadsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == FileUploadsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileUploadsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + FileUploadsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + FileUploadsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + FileUploadsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + FileUploadsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + FileUploadsServiceClient._get_universe_domain(None, None) + == FileUploadsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + FileUploadsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
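+
+
+def test_universe_domain_client_options_example():
+    # Hand-written illustrative sketch, not generator output. It restates the
+    # precedence exercised by test__get_universe_domain above from the
+    # caller's side, and assumes the standard GAPIC client surface: a
+    # ``universe_domain`` property on the client, ``client_options`` accepting
+    # a ``universe_domain`` key (google-api-core 2.15.0+), and, like the other
+    # tests in this file, that GOOGLE_CLOUD_UNIVERSE_DOMAIN is unset.
+
+    # With no override, the client resolves to the googleapis.com default.
+    client = FileUploadsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+    assert client.universe_domain == "googleapis.com"
+
+    # An explicit universe domain passed through client options takes
+    # precedence over the environment variable and the default. Construction
+    # does not check it against the credentials; that check happens in
+    # _validate_universe_domain(), covered by the next test.
+    client = FileUploadsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options={"universe_domain": "example-universe.com"},
+        transport="rest",
+    )
+    assert client.universe_domain == "example-universe.com"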
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport, "grpc"), + (FileUploadsServiceClient, transports.FileUploadsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FileUploadsServiceClient, "grpc"), + (FileUploadsServiceAsyncClient, "grpc_asyncio"), + (FileUploadsServiceClient, "rest"), + ], +) +def test_file_uploads_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com/" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.FileUploadsServiceGrpcTransport, "grpc"), + (transports.FileUploadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FileUploadsServiceRestTransport, "rest"), + ], +) +def test_file_uploads_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FileUploadsServiceClient, "grpc"), + (FileUploadsServiceAsyncClient, "grpc_asyncio"), + (FileUploadsServiceClient, "rest"), + ], +) +def test_file_uploads_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "merchantapi.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://merchantapi.googleapis.com/" + ) + + +def test_file_uploads_service_client_get_transport_class(): + transport = FileUploadsServiceClient.get_transport_class() + available_transports = [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceRestTransport, + ] + assert transport in available_transports + + transport = FileUploadsServiceClient.get_transport_class("grpc") + assert transport == transports.FileUploadsServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport, "grpc"), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FileUploadsServiceClient, transports.FileUploadsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +def test_file_uploads_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FileUploadsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FileUploadsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com/" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com/", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + "true", + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + "false", + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceRestTransport, + "rest", + "true", + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_file_uploads_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
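+    #
+    # Concretely, for this service the autoswitch means a client configured
+    # with GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and a client certificate
+    # source is expected to target the mTLS endpoint
+    # "merchantapi.mtls.googleapis.com", while with "false" it stays on
+    # "merchantapi.googleapis.com" (illustrative summary of the cases below).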
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [FileUploadsServiceClient, FileUploadsServiceAsyncClient] +) +@mock.patch.object( + FileUploadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FileUploadsServiceAsyncClient), +) +def test_file_uploads_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [FileUploadsServiceClient, FileUploadsServiceAsyncClient] +) +@mock.patch.object( + FileUploadsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceClient), +) +@mock.patch.object( + FileUploadsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileUploadsServiceAsyncClient), +) +def test_file_uploads_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = FileUploadsServiceClient._DEFAULT_UNIVERSE + default_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FileUploadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport, "grpc"), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FileUploadsServiceClient, transports.FileUploadsServiceRestTransport, "rest"), + ], +) +def test_file_uploads_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + FileUploadsServiceClient, + transports.FileUploadsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_file_uploads_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_file_uploads_service_client_client_options_from_dict(): + with mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = FileUploadsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + FileUploadsServiceClient, + transports.FileUploadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + FileUploadsServiceAsyncClient, + transports.FileUploadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_file_uploads_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "merchantapi.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=("https://www.googleapis.com/auth/content",),
+            scopes=None,
+            default_host="merchantapi.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        fileuploads.GetFileUploadRequest,
+        dict,
+    ],
+)
+def test_get_file_upload(request_type, transport: str = "grpc"):
+    client = FileUploadsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = fileuploads.FileUpload(
+            name="name_value",
+            data_source_id=1462,
+            processing_state=fileuploads.FileUpload.ProcessingState.FAILED,
+            items_total=1189,
+            items_created=1369,
+            items_updated=1384,
+        )
+        response = client.get_file_upload(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = fileuploads.GetFileUploadRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, fileuploads.FileUpload)
+    assert response.name == "name_value"
+    assert response.data_source_id == 1462
+    assert response.processing_state == fileuploads.FileUpload.ProcessingState.FAILED
+    assert response.items_total == 1189
+    assert response.items_created == 1369
+    assert response.items_updated == 1384
+
+
+def test_get_file_upload_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = FileUploadsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client.get_file_upload()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == fileuploads.GetFileUploadRequest()
+
+
+def test_get_file_upload_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = fileuploads.GetFileUploadRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_file_upload(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == fileuploads.GetFileUploadRequest( + name="name_value", + ) + + +def test_get_file_upload_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file_upload in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_file_upload] = mock_rpc + request = {} + client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_file_upload(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_file_upload_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload( + name="name_value", + data_source_id=1462, + processing_state=fileuploads.FileUpload.ProcessingState.FAILED, + items_total=1189, + items_created=1369, + items_updated=1384, + ) + ) + response = await client.get_file_upload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == fileuploads.GetFileUploadRequest() + + +@pytest.mark.asyncio +async def test_get_file_upload_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_file_upload + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_file_upload + ] = mock_rpc + + request = {} + await client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_file_upload(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_file_upload_async( + transport: str = "grpc_asyncio", request_type=fileuploads.GetFileUploadRequest +): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload( + name="name_value", + data_source_id=1462, + processing_state=fileuploads.FileUpload.ProcessingState.FAILED, + items_total=1189, + items_created=1369, + items_updated=1384, + ) + ) + response = await client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = fileuploads.GetFileUploadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, fileuploads.FileUpload) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.processing_state == fileuploads.FileUpload.ProcessingState.FAILED + assert response.items_total == 1189 + assert response.items_created == 1369 + assert response.items_updated == 1384 + + +@pytest.mark.asyncio +async def test_get_file_upload_async_from_dict(): + await test_get_file_upload_async(request_type=dict) + + +def test_get_file_upload_field_headers(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = fileuploads.GetFileUploadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + call.return_value = fileuploads.FileUpload() + client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_file_upload_field_headers_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = fileuploads.GetFileUploadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload() + ) + await client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_file_upload_flattened(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = fileuploads.FileUpload() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_file_upload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_file_upload_flattened_error(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_file_upload( + fileuploads.GetFileUploadRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_file_upload_flattened_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file_upload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = fileuploads.FileUpload() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + fileuploads.FileUpload() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_file_upload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_file_upload_flattened_error_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_file_upload( + fileuploads.GetFileUploadRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + fileuploads.GetFileUploadRequest, + dict, + ], +) +def test_get_file_upload_rest(request_type): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2/fileUploads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = fileuploads.FileUpload( + name="name_value", + data_source_id=1462, + processing_state=fileuploads.FileUpload.ProcessingState.FAILED, + items_total=1189, + items_created=1369, + items_updated=1384, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = fileuploads.FileUpload.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_file_upload(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, fileuploads.FileUpload) + assert response.name == "name_value" + assert response.data_source_id == 1462 + assert response.processing_state == fileuploads.FileUpload.ProcessingState.FAILED + assert response.items_total == 1189 + assert response.items_created == 1369 + assert response.items_updated == 1384 + + +def test_get_file_upload_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file_upload in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_file_upload] = mock_rpc + + request = {} + client.get_file_upload(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_file_upload(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_file_upload_rest_required_fields( + request_type=fileuploads.GetFileUploadRequest, +): + transport_class = transports.FileUploadsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file_upload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file_upload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = fileuploads.FileUpload() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = fileuploads.FileUpload.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_file_upload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_file_upload_rest_unset_required_fields(): + transport = transports.FileUploadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_file_upload._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_file_upload_rest_interceptors(null_interceptor): + transport = transports.FileUploadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FileUploadsServiceRestInterceptor(), + ) + client = FileUploadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FileUploadsServiceRestInterceptor, "post_get_file_upload" + ) as post, mock.patch.object( + transports.FileUploadsServiceRestInterceptor, "pre_get_file_upload" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = fileuploads.GetFileUploadRequest.pb( + fileuploads.GetFileUploadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = fileuploads.FileUpload.to_json( + fileuploads.FileUpload() + ) + + request = fileuploads.GetFileUploadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = fileuploads.FileUpload() + + client.get_file_upload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_file_upload_rest_bad_request( + transport: str = "rest", request_type=fileuploads.GetFileUploadRequest +): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/dataSources/sample2/fileUploads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_file_upload(request) + + +def test_get_file_upload_rest_flattened(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = fileuploads.FileUpload() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "accounts/sample1/dataSources/sample2/fileUploads/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = fileuploads.FileUpload.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_file_upload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/datasources/v1beta/{name=accounts/*/dataSources/*/fileUploads/*}" + % client.transport._host, + args[1], + ) + + +def test_get_file_upload_rest_flattened_error(transport: str = "rest"): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_file_upload( + fileuploads.GetFileUploadRequest(), + name="name_value", + ) + + +def test_get_file_upload_rest_error(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileUploadsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FileUploadsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.FileUploadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FileUploadsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + transports.FileUploadsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = FileUploadsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.FileUploadsServiceGrpcTransport, + ) + + +def test_file_uploads_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FileUploadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_file_uploads_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.FileUploadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ("get_file_upload",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_file_uploads_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FileUploadsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id="octopus", + ) + + +def test_file_uploads_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.shopping.merchant_datasources_v1beta.services.file_uploads_service.transports.FileUploadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FileUploadsServiceTransport() + adc.assert_called_once() + + +def test_file_uploads_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FileUploadsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/content",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ], +) +def test_file_uploads_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/content",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.FileUploadsServiceGrpcTransport,
+        transports.FileUploadsServiceGrpcAsyncIOTransport,
+        transports.FileUploadsServiceRestTransport,
+    ],
+)
+def test_file_uploads_service_transport_auth_gdch_credentials(transport_class):
+    host = "https://language.com/"
+    api_audience_tests = [None, "https://language2.com/"]
+    api_audience_expect = [host, "https://language2.com/"]
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, "default", autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(
+                return_value=gdch_mock
+            )
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.FileUploadsServiceGrpcTransport, grpc_helpers),
+        (transports.FileUploadsServiceGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+def test_file_uploads_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "merchantapi.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/content",),
+            scopes=["1", "2"],
+            default_host="merchantapi.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.FileUploadsServiceGrpcTransport,
+        transports.FileUploadsServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_file_uploads_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class,
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_file_uploads_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.FileUploadsServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_file_uploads_service_host_no_port(transport_name):
+    client = FileUploadsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="merchantapi.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "merchantapi.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://merchantapi.googleapis.com/"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_file_uploads_service_host_with_port(transport_name):
+    client = FileUploadsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="merchantapi.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "merchantapi.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://merchantapi.googleapis.com:8000/"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_file_uploads_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = FileUploadsServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = FileUploadsServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_file_upload._session
+    session2 = client2.transport.get_file_upload._session
+    assert session1 != session2
+
+
+def test_file_uploads_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.FileUploadsServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+def test_file_uploads_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+ transport = transports.FileUploadsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ], +) +def test_file_uploads_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.FileUploadsServiceGrpcTransport, + transports.FileUploadsServiceGrpcAsyncIOTransport, + ], +) +def test_file_uploads_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_file_upload_path(): + account = "squid" + datasource = "clam" + fileupload = "whelk" + expected = ( + "accounts/{account}/dataSources/{datasource}/fileUploads/{fileupload}".format( + account=account, + datasource=datasource, + fileupload=fileupload, + ) + ) + actual = FileUploadsServiceClient.file_upload_path(account, datasource, fileupload) + assert expected == actual + + +def test_parse_file_upload_path(): + expected = { + "account": "octopus", + "datasource": "oyster", + "fileupload": "nudibranch", + } + path = FileUploadsServiceClient.file_upload_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_file_upload_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = FileUploadsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = FileUploadsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = FileUploadsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = FileUploadsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FileUploadsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = FileUploadsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = FileUploadsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = FileUploadsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = FileUploadsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = FileUploadsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = FileUploadsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = FileUploadsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.FileUploadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.FileUploadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = FileUploadsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = FileUploadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = FileUploadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test 
client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (FileUploadsServiceClient, transports.FileUploadsServiceGrpcTransport),
+        (
+            FileUploadsServiceAsyncClient,
+            transports.FileUploadsServiceGrpcAsyncIOTransport,
+        ),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                    UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+                ),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )

From 6db79dc964b540f1c9c21d96122e4916aca66d98 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Tue, 8 Oct 2024 15:53:34 +0000
Subject: [PATCH 106/108] feat: [google-ads-admanager] Added support for Interactive Reporting (#13123)

- [ ] Regenerate this pull request now.

BEGIN_COMMIT_OVERRIDE
feat: Added support for Interactive Reporting
fix!: Removed closed beta services that had data discrepancies with the SOAP API
END_COMMIT_OVERRIDE

Temporarily removed the LineItem, Creative, Contact, Label, and Team services until data discrepancies with the SOAP API are resolved.
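
A minimal sketch of how the new interactive reporting surface might be used, assuming the request field names shown here (name, report_result) match the generated protos, that run_report is surfaced as a long-running operation (as the new RunReportMetadata/RunReportResponse types suggest), and with purely illustrative resource names:

    from google.ads import admanager_v1

    # Uses Application Default Credentials; pass explicit credentials if needed.
    client = admanager_v1.ReportServiceClient()

    # Start an asynchronous run of an existing report definition.
    operation = client.run_report(
        request=admanager_v1.RunReportRequest(name="networks/123/reports/456")
    )
    run_result = operation.result()  # RunReportResponse once the run finishes

    # Page through the rows of the completed report result.
    for row in client.fetch_report_result_rows(
        request=admanager_v1.FetchReportResultRowsRequest(
            name=run_result.report_result
        )
    ):
        print(row)

run_report itself does not return rows; under the assumptions above, rows only become available through fetch_report_result_rows after the run completes.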
PiperOrigin-RevId: 681864022 Source-Link: https://github.com/googleapis/googleapis/commit/672cd6a381c7a0aea16438e2335dc7799bd70e4d Source-Link: https://github.com/googleapis/googleapis-gen/commit/039270990aba3bf810f2a274e8ffdd2fd6a954e1 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFkcy1hZG1hbmFnZXIvLk93bEJvdC55YW1sIiwiaCI6IjAzOTI3MDk5MGFiYTNiZjgxMGYyYTI3NGU4ZmZkZDJmZDZhOTU0ZTEifQ== --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../docs/admanager_v1/ad_partner_service.rst | 10 - .../docs/admanager_v1/contact_service.rst | 10 - .../docs/admanager_v1/creative_service.rst | 10 - .../entity_signals_mapping_service.rst | 10 + .../docs/admanager_v1/label_service.rst | 10 - .../docs/admanager_v1/line_item_service.rst | 10 - .../docs/admanager_v1/report_service.rst | 4 + .../docs/admanager_v1/services_.rst | 8 +- .../taxonomy_category_service.rst | 10 + .../docs/admanager_v1/team_service.rst | 10 - .../docs/admanager_v1/user_service.rst | 4 - .../google/ads/admanager/__init__.py | 265 +- .../google/ads/admanager_v1/__init__.py | 236 +- .../ads/admanager_v1/gapic_metadata.json | 154 +- .../ad_partner_service/transports/__init__.py | 30 - .../services/ad_unit_service/client.py | 122 +- .../services/ad_unit_service/pagers.py | 78 +- .../ad_unit_service/transports/base.py | 21 +- .../ad_unit_service/transports/rest.py | 153 +- .../services/company_service/client.py | 6 +- .../services/company_service/pagers.py | 4 +- .../company_service/transports/base.py | 4 +- .../company_service/transports/rest.py | 22 +- .../services/contact_service/__init__.py | 18 - .../services/contact_service/client.py | 986 ---- .../services/contact_service/pagers.py | 115 - .../contact_service/transports/__init__.py | 30 - .../contact_service/transports/base.py | 188 - .../contact_service/transports/rest.py | 526 -- .../creative_service/transports/rest.py | 527 -- .../services/custom_field_service/client.py | 12 +- .../services/custom_field_service/pagers.py | 4 +- .../custom_field_service/transports/base.py | 6 +- .../custom_field_service/transports/rest.py | 24 +- .../custom_targeting_key_service/client.py | 3 +- .../custom_targeting_key_service/pagers.py | 7 +- .../transports/base.py | 9 +- .../transports/rest.py | 23 +- .../custom_targeting_value_service/client.py | 3 +- .../custom_targeting_value_service/pagers.py | 9 +- .../transports/base.py | 9 +- .../transports/rest.py | 23 +- .../__init__.py | 4 +- .../client.py | 785 ++- .../pagers.py | 45 +- .../transports/__init__.py | 19 +- .../transports/base.py | 108 +- .../transports/rest.py | 1153 ++++ .../services/label_service/__init__.py | 18 - .../services/label_service/client.py | 977 ---- .../services/label_service/pagers.py | 115 - .../label_service/transports/__init__.py | 30 - .../services/label_service/transports/base.py | 188 - .../services/label_service/transports/rest.py | 520 -- .../services/line_item_service/__init__.py | 18 - .../services/line_item_service/client.py | 1021 ---- .../services/line_item_service/pagers.py | 115 - .../line_item_service/transports/__init__.py | 30 - .../line_item_service/transports/base.py | 188 - .../line_item_service/transports/rest.py | 528 -- .../services/network_service/client.py | 77 +- .../network_service/transports/base.py | 21 +- .../network_service/transports/rest.py | 135 +- .../services/order_service/client.py | 29 +- .../services/order_service/pagers.py | 4 +- .../services/order_service/transports/base.py | 4 +- .../services/order_service/transports/rest.py | 18 +- 
.../services/placement_service/client.py | 8 +- .../services/placement_service/pagers.py | 4 +- .../placement_service/transports/base.py | 4 +- .../placement_service/transports/rest.py | 22 +- .../services/report_service/client.py | 660 ++- .../services/report_service/pagers.py | 189 + .../report_service/transports/base.py | 84 +- .../report_service/transports/rest.py | 691 ++- .../services/role_service/client.py | 23 +- .../services/role_service/pagers.py | 4 +- .../services/role_service/transports/base.py | 4 +- .../services/role_service/transports/rest.py | 30 +- .../__init__.py | 4 +- .../client.py | 210 +- .../pagers.py | 39 +- .../transports/__init__.py | 19 +- .../transports/base.py | 36 +- .../transports/rest.py | 184 +- .../services/team_service/__init__.py | 18 - .../services/team_service/client.py | 977 ---- .../services/team_service/pagers.py | 115 - .../services/team_service/transports/base.py | 187 - .../services/team_service/transports/rest.py | 516 -- .../services/user_service/client.py | 140 +- .../services/user_service/pagers.py | 115 - .../services/user_service/transports/base.py | 20 +- .../services/user_service/transports/rest.py | 144 +- .../google/ads/admanager_v1/types/__init__.py | 221 +- .../types/ad_partner_declaration.py | 80 - .../ads/admanager_v1/types/ad_unit_enums.py | 93 +- .../admanager_v1/types/ad_unit_messages.py | 369 ++ .../ads/admanager_v1/types/ad_unit_service.py | 463 +- .../ads/admanager_v1/types/ad_unit_size.py | 67 - .../admanager_v1/types/company_messages.py | 174 + .../ads/admanager_v1/types/company_service.py | 144 +- .../admanager_v1/types/company_type_enum.py | 4 - .../types/computed_status_enum.py | 90 - .../admanager_v1/types/contact_messages.py | 56 + .../ads/admanager_v1/types/contact_service.py | 174 - .../types/creative_placeholder.py | 99 - .../admanager_v1/types/creative_service.py | 229 - .../types/custom_field_messages.py | 138 + .../types/custom_field_service.py | 124 +- .../admanager_v1/types/custom_field_value.py | 114 + .../types/custom_targeting_key_messages.py | 93 + .../types/custom_targeting_key_service.py | 70 +- .../types/custom_targeting_value_messages.py | 81 + .../types/custom_targeting_value_service.py | 56 +- .../types/entity_signals_mapping_messages.py | 96 + .../types/entity_signals_mapping_service.py | 306 ++ .../ads/admanager_v1/types/frequency_cap.py | 46 +- .../google/ads/admanager_v1/types/goal.py | 204 - .../ads/admanager_v1/types/label_messages.py | 46 + .../ads/admanager_v1/types/label_service.py | 168 - .../ads/admanager_v1/types/line_item_enums.py | 314 -- .../admanager_v1/types/line_item_service.py | 491 -- .../admanager_v1/types/network_messages.py | 106 + .../ads/admanager_v1/types/network_service.py | 90 +- .../ads/admanager_v1/types/order_enums.py | 73 + .../ads/admanager_v1/types/order_messages.py | 278 + .../ads/admanager_v1/types/order_service.py | 273 +- .../admanager_v1/types/placement_messages.py | 102 + .../admanager_v1/types/placement_service.py | 77 +- .../ads/admanager_v1/types/report_service.py | 4663 ++++++++++++++++- .../ads/admanager_v1/types/role_enums.py | 52 + .../ads/admanager_v1/types/role_messages.py | 79 + .../ads/admanager_v1/types/role_service.py | 47 +- .../google/ads/admanager_v1/types/size.py | 55 +- .../ads/admanager_v1/types/size_type_enum.py | 78 + .../types/taxonomy_category_messages.py | 96 + ...ervice.py => taxonomy_category_service.py} | 78 +- .../admanager_v1/types/taxonomy_type_enum.py | 62 + .../ads/admanager_v1/types/team_messages.py | 54 + 
.../ads/admanager_v1/types/team_service.py | 168 - .../ads/admanager_v1/types/time_unit_enum.py | 69 + .../ads/admanager_v1/types/user_messages.py | 109 + .../ads/admanager_v1/types/user_service.py | 187 - ...d_unit_service_list_ad_unit_sizes_sync.py} | 14 +- ...tch_create_entity_signals_mappings_sync.py | 58 + ...tch_update_entity_signals_mappings_sync.py | 57 + ...vice_create_entity_signals_mapping_sync.py | 57 + ...ervice_get_entity_signals_mapping_sync.py} | 14 +- ...vice_list_entity_signals_mappings_sync.py} | 14 +- ...vice_update_entity_signals_mapping_sync.py | 56 + ...ed_line_item_service_get_line_item_sync.py | 52 - ...ted_network_service_list_networks_sync.py} | 15 +- ...ated_report_service_create_report_sync.py} | 23 +- ..._service_fetch_report_result_rows_sync.py} | 15 +- ...nerated_report_service_get_report_sync.py} | 14 +- ...rated_report_service_list_reports_sync.py} | 14 +- ...nerated_report_service_run_report_sync.py} | 14 +- ...ated_report_service_update_report_sync.py} | 21 +- ...ory_service_get_taxonomy_category_sync.py} | 14 +- ..._service_list_taxonomy_categories_sync.py} | 14 +- ..._generated_team_service_list_teams_sync.py | 53 - ...ppet_metadata_google.ads.admanager.v1.json | 1260 +++-- .../doc-formatting.yaml | 1 + .../scripts/fixup_admanager_v1_keywords.py | 30 +- .../admanager_v1/test_ad_unit_service.py | 458 +- .../admanager_v1/test_company_service.py | 42 +- .../admanager_v1/test_contact_service.py | 2180 -------- .../admanager_v1/test_creative_service.py | 2249 -------- .../admanager_v1/test_custom_field_service.py | 44 +- .../test_custom_targeting_key_service.py | 39 +- .../test_custom_targeting_value_service.py | 39 +- .../test_entity_signals_mapping_service.py | 3898 ++++++++++++++ .../gapic/admanager_v1/test_label_service.py | 2151 -------- .../admanager_v1/test_line_item_service.py | 2295 -------- .../admanager_v1/test_network_service.py | 190 +- .../gapic/admanager_v1/test_order_service.py | 133 +- .../admanager_v1/test_placement_service.py | 44 +- .../gapic/admanager_v1/test_report_service.py | 2084 +++++++- .../gapic/admanager_v1/test_role_service.py | 48 +- ...e.py => test_taxonomy_category_service.py} | 679 +-- .../gapic/admanager_v1/test_team_service.py | 2145 -------- .../gapic/admanager_v1/test_user_service.py | 469 +- .../doc-formatting.yaml | 24 + 184 files changed, 20721 insertions(+), 26016 deletions(-) delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/contact_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/creative_service.rst create mode 100644 packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/label_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst create mode 100644 packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst delete mode 100644 packages/google-ads-admanager/docs/admanager_v1/team_service.rst delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py delete mode 100644 
packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => entity_signals_mapping_service}/__init__.py (85%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => entity_signals_mapping_service}/client.py (53%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => entity_signals_mapping_service}/pagers.py (69%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{team_service => entity_signals_mapping_service}/transports/__init__.py (60%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => entity_signals_mapping_service}/transports/base.py (63%) create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/rest.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/__init__.py (86%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/client.py (84%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/pagers.py (71%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => taxonomy_category_service}/transports/__init__.py (61%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{creative_service => 
taxonomy_category_service}/transports/base.py (86%) rename packages/google-ads-admanager/google/ads/admanager_v1/services/{ad_partner_service => taxonomy_category_service}/transports/rest.py (73%) delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/__init__.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/client.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/base.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/rest.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py create mode 100644 
packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py rename packages/google-ads-admanager/google/ads/admanager_v1/types/{ad_partner_service.py => taxonomy_category_service.py} (63%) create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py delete mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py create mode 100644 packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_line_item_service_list_line_items_sync.py => admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py} (81%) create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_creative_service_get_creative_sync.py => admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py} (77%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_label_service_list_labels_sync.py => admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py} (77%) create mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py delete mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_get_line_item_sync.py rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_label_service_get_label_sync.py => admanager_v1_generated_network_service_list_networks_sync.py} (81%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_contact_service_list_contacts_sync.py => admanager_v1_generated_report_service_create_report_sync.py} (70%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_creative_service_list_creatives_sync.py => admanager_v1_generated_report_service_fetch_report_result_rows_sync.py} (79%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_team_service_get_team_sync.py => admanager_v1_generated_report_service_get_report_sync.py} (82%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_user_service_list_users_sync.py => admanager_v1_generated_report_service_list_reports_sync.py} (82%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_report_service_export_saved_report_sync.py => admanager_v1_generated_report_service_run_report_sync.py} (82%) rename 
packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_contact_service_get_contact_sync.py => admanager_v1_generated_report_service_update_report_sync.py} (71%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py => admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py} (78%) rename packages/google-ads-admanager/samples/generated_samples/{admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py => admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py} (78%) delete mode 100644 packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py create mode 120000 packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_contact_service.py delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py create mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_line_item_service.py rename packages/google-ads-admanager/tests/unit/gapic/admanager_v1/{test_ad_partner_service.py => test_taxonomy_category_service.py} (75%) delete mode 100644 packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_team_service.py diff --git a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst b/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst deleted file mode 100644 index 7ccc095d3628..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AdPartnerService ----------------------------------- - -.. automodule:: google.ads.admanager_v1.services.ad_partner_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.ad_partner_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst b/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst deleted file mode 100644 index 478ccc08a803..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ContactService --------------------------------- - -.. automodule:: google.ads.admanager_v1.services.contact_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.contact_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst b/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst deleted file mode 100644 index 2f4e457a7ab3..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -CreativeService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.creative_service - :members: - :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.creative_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst new file mode 100644 index 000000000000..d4e1f7fa5634 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst @@ -0,0 +1,10 @@ +EntitySignalsMappingService +--------------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service + :members: + :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst b/packages/google-ads-admanager/docs/admanager_v1/label_service.rst deleted file mode 100644 index f3408d1767f5..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LabelService ------------------------------- - -.. automodule:: google.ads.admanager_v1.services.label_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.label_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst b/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst deleted file mode 100644 index 6b4388d90085..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LineItemService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.line_item_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.line_item_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst index 96130cad2289..a655ad73d7a3 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst @@ -4,3 +4,7 @@ ReportService .. automodule:: google.ads.admanager_v1.services.report_service :members: :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.report_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/services_.rst b/packages/google-ads-admanager/docs/admanager_v1/services_.rst index a9b93b8a07c2..a1522b62dc40 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/services_.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/services_.rst @@ -3,20 +3,16 @@ Services for Google Ads Admanager v1 API .. 
toctree:: :maxdepth: 2 - ad_partner_service ad_unit_service company_service - contact_service - creative_service custom_field_service custom_targeting_key_service custom_targeting_value_service - label_service - line_item_service + entity_signals_mapping_service network_service order_service placement_service report_service role_service - team_service + taxonomy_category_service user_service diff --git a/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst new file mode 100644 index 000000000000..61f13e739e19 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst @@ -0,0 +1,10 @@ +TaxonomyCategoryService +----------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.taxonomy_category_service + :members: + :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.taxonomy_category_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst b/packages/google-ads-admanager/docs/admanager_v1/team_service.rst deleted file mode 100644 index 4d3e14c6f6c1..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TeamService ------------------------------ - -.. automodule:: google.ads.admanager_v1.services.team_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.team_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst index 9bae86979749..c7be2db4394e 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst @@ -4,7 +4,3 @@ UserService .. automodule:: google.ads.admanager_v1.services.user_service :members: :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.user_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/google/ads/admanager/__init__.py b/packages/google-ads-admanager/google/ads/admanager/__init__.py index 39067144e427..9672929d7e9f 100644 --- a/packages/google-ads-admanager/google/ads/admanager/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager/__init__.py @@ -18,15 +18,8 @@ __version__ = package_version.__version__ -from google.ads.admanager_v1.services.ad_partner_service.client import ( - AdPartnerServiceClient, -) from google.ads.admanager_v1.services.ad_unit_service.client import AdUnitServiceClient from google.ads.admanager_v1.services.company_service.client import CompanyServiceClient -from google.ads.admanager_v1.services.contact_service.client import ContactServiceClient -from google.ads.admanager_v1.services.creative_service.client import ( - CreativeServiceClient, -) from google.ads.admanager_v1.services.custom_field_service.client import ( CustomFieldServiceClient, ) @@ -36,9 +29,8 @@ from google.ads.admanager_v1.services.custom_targeting_value_service.client import ( CustomTargetingValueServiceClient, ) -from google.ads.admanager_v1.services.label_service.client import LabelServiceClient -from google.ads.admanager_v1.services.line_item_service.client import ( - LineItemServiceClient, +from google.ads.admanager_v1.services.entity_signals_mapping_service.client import ( + EntitySignalsMappingServiceClient, ) from google.ads.admanager_v1.services.network_service.client import NetworkServiceClient from google.ads.admanager_v1.services.order_service.client import OrderServiceClient @@ -47,76 +39,66 @@ ) from google.ads.admanager_v1.services.report_service.client import ReportServiceClient from google.ads.admanager_v1.services.role_service.client import RoleServiceClient -from google.ads.admanager_v1.services.team_service.client import TeamServiceClient -from google.ads.admanager_v1.services.user_service.client import UserServiceClient -from google.ads.admanager_v1.types.ad_partner_declaration import ( - AdPartnerDeclaration, - DeclarationTypeEnum, +from google.ads.admanager_v1.services.taxonomy_category_service.client import ( + TaxonomyCategoryServiceClient, ) -from google.ads.admanager_v1.types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, +from google.ads.admanager_v1.services.user_service.client import UserServiceClient +from google.ads.admanager_v1.types.ad_unit_enums import ( + AdUnitStatusEnum, + SmartSizeModeEnum, + TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_enums import AppliedAdsenseEnabledEnum -from google.ads.admanager_v1.types.ad_unit_service import ( +from google.ads.admanager_v1.types.ad_unit_messages import ( AdUnit, AdUnitParent, - GetAdUnitRequest, + AdUnitSize, LabelFrequencyCap, +) +from google.ads.admanager_v1.types.ad_unit_service import ( + GetAdUnitRequest, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_size import AdUnitSize from google.ads.admanager_v1.types.admanager_error import AdManagerError from google.ads.admanager_v1.types.applied_label import AppliedLabel from google.ads.admanager_v1.types.company_credit_status_enum import ( CompanyCreditStatusEnum, ) +from google.ads.admanager_v1.types.company_messages import Company from google.ads.admanager_v1.types.company_service 
import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from google.ads.admanager_v1.types.company_type_enum import CompanyTypeEnum -from google.ads.admanager_v1.types.computed_status_enum import ComputedStatusEnum -from google.ads.admanager_v1.types.contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from google.ads.admanager_v1.types.creative_placeholder import CreativePlaceholder -from google.ads.admanager_v1.types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from google.ads.admanager_v1.types.contact_messages import Contact from google.ads.admanager_v1.types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) -from google.ads.admanager_v1.types.custom_field_service import ( +from google.ads.admanager_v1.types.custom_field_messages import ( CustomField, CustomFieldOption, +) +from google.ads.admanager_v1.types.custom_field_service import ( GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from google.ads.admanager_v1.types.custom_field_value import CustomFieldValue from google.ads.admanager_v1.types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) -from google.ads.admanager_v1.types.custom_targeting_key_service import ( +from google.ads.admanager_v1.types.custom_targeting_key_messages import ( CustomTargetingKey, +) +from google.ads.admanager_v1.types.custom_targeting_key_service import ( GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -125,109 +107,114 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) -from google.ads.admanager_v1.types.custom_targeting_value_service import ( +from google.ads.admanager_v1.types.custom_targeting_value_messages import ( CustomTargetingValue, +) +from google.ads.admanager_v1.types.custom_targeting_value_service import ( GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from google.ads.admanager_v1.types.environment_type_enum import EnvironmentTypeEnum -from google.ads.admanager_v1.types.frequency_cap import FrequencyCap, TimeUnitEnum -from google.ads.admanager_v1.types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from google.ads.admanager_v1.types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, +from google.ads.admanager_v1.types.entity_signals_mapping_messages import ( + EntitySignalsMapping, ) -from google.ads.admanager_v1.types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from google.ads.admanager_v1.types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from google.ads.admanager_v1.types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, +from google.ads.admanager_v1.types.environment_type_enum import 
EnvironmentTypeEnum +from google.ads.admanager_v1.types.frequency_cap import FrequencyCap +from google.ads.admanager_v1.types.label_messages import Label +from google.ads.admanager_v1.types.network_messages import Network +from google.ads.admanager_v1.types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) -from google.ads.admanager_v1.types.network_service import GetNetworkRequest, Network +from google.ads.admanager_v1.types.order_enums import OrderStatusEnum +from google.ads.admanager_v1.types.order_messages import Order from google.ads.admanager_v1.types.order_service import ( GetOrderRequest, ListOrdersRequest, ListOrdersResponse, - Order, ) from google.ads.admanager_v1.types.placement_enums import PlacementStatusEnum +from google.ads.admanager_v1.types.placement_messages import Placement from google.ads.admanager_v1.types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from google.ads.admanager_v1.types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) +from google.ads.admanager_v1.types.role_enums import RoleStatusEnum +from google.ads.admanager_v1.types.role_messages import Role from google.ads.admanager_v1.types.role_service import ( GetRoleRequest, ListRolesRequest, ListRolesResponse, - Role, -) -from google.ads.admanager_v1.types.size import Size, SizeTypeEnum -from google.ads.admanager_v1.types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, ) -from google.ads.admanager_v1.types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from google.ads.admanager_v1.types.size import Size +from google.ads.admanager_v1.types.size_type_enum import SizeTypeEnum +from google.ads.admanager_v1.types.taxonomy_category_messages import TaxonomyCategory +from google.ads.admanager_v1.types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from google.ads.admanager_v1.types.taxonomy_type_enum import TaxonomyTypeEnum +from google.ads.admanager_v1.types.team_messages import Team +from google.ads.admanager_v1.types.time_unit_enum import TimeUnitEnum +from google.ads.admanager_v1.types.user_messages import User +from google.ads.admanager_v1.types.user_service import GetUserRequest __all__ = ( - "AdPartnerServiceClient", "AdUnitServiceClient", "CompanyServiceClient", - "ContactServiceClient", - "CreativeServiceClient", "CustomFieldServiceClient", "CustomTargetingKeyServiceClient", "CustomTargetingValueServiceClient", - "LabelServiceClient", - "LineItemServiceClient", + "EntitySignalsMappingServiceClient", "NetworkServiceClient", "OrderServiceClient", "PlacementServiceClient", "ReportServiceClient", "RoleServiceClient", - "TeamServiceClient", + "TaxonomyCategoryServiceClient", "UserServiceClient", - "AdPartnerDeclaration", - "DeclarationTypeEnum", - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", "AdUnit", "AdUnitParent", - "GetAdUnitRequest", + 
"AdUnitSize", "LabelFrequencyCap", + "GetAdUnitRequest", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", - "SmartSizeModeEnum", - "TargetWindowEnum", - "AdUnitSize", "AdManagerError", "AppliedLabel", "CompanyCreditStatusEnum", @@ -236,16 +223,7 @@ "ListCompaniesRequest", "ListCompaniesResponse", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - "CreativePlaceholder", - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldStatusEnum", @@ -255,6 +233,7 @@ "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", + "CustomFieldValue", "CustomTargetingKeyReportableTypeEnum", "CustomTargetingKeyStatusEnum", "CustomTargetingKeyTypeEnum", @@ -268,53 +247,61 @@ "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", + "EntitySignalsMapping", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", + "CreateEntitySignalsMappingRequest", + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "UpdateEntitySignalsMappingRequest", "EnvironmentTypeEnum", "FrequencyCap", - "TimeUnitEnum", - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - "GetLabelRequest", "Label", - "ListLabelsRequest", - "ListLabelsResponse", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - "GetLineItemRequest", - "LineItem", - "ListLineItemsRequest", - "ListLineItemsResponse", - "GetNetworkRequest", "Network", + "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", + "OrderStatusEnum", + "Order", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", - "Order", "PlacementStatusEnum", + "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", - "Placement", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "CreateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", "Report", + "ReportDefinition", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", + "UpdateReportRequest", + "RoleStatusEnum", + "Role", "GetRoleRequest", "ListRolesRequest", "ListRolesResponse", - "Role", "Size", "SizeTypeEnum", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", + "TaxonomyCategory", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", + "TaxonomyTypeEnum", "Team", - "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", + "TimeUnitEnum", "User", + "GetUserRequest", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py index b13eac5f2835..e2d73bf488ba 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py @@ -18,86 +18,59 @@ __version__ = package_version.__version__ -from .services.ad_partner_service import AdPartnerServiceClient from 
.services.ad_unit_service import AdUnitServiceClient from .services.company_service import CompanyServiceClient -from .services.contact_service import ContactServiceClient -from .services.creative_service import CreativeServiceClient from .services.custom_field_service import CustomFieldServiceClient from .services.custom_targeting_key_service import CustomTargetingKeyServiceClient from .services.custom_targeting_value_service import CustomTargetingValueServiceClient -from .services.label_service import LabelServiceClient -from .services.line_item_service import LineItemServiceClient +from .services.entity_signals_mapping_service import EntitySignalsMappingServiceClient from .services.network_service import NetworkServiceClient from .services.order_service import OrderServiceClient from .services.placement_service import PlacementServiceClient from .services.report_service import ReportServiceClient from .services.role_service import RoleServiceClient -from .services.team_service import TeamServiceClient +from .services.taxonomy_category_service import TaxonomyCategoryServiceClient from .services.user_service import UserServiceClient -from .types.ad_partner_declaration import AdPartnerDeclaration, DeclarationTypeEnum -from .types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, -) -from .types.ad_unit_enums import AppliedAdsenseEnabledEnum +from .types.ad_unit_enums import AdUnitStatusEnum, SmartSizeModeEnum, TargetWindowEnum +from .types.ad_unit_messages import AdUnit, AdUnitParent, AdUnitSize, LabelFrequencyCap from .types.ad_unit_service import ( - AdUnit, - AdUnitParent, GetAdUnitRequest, - LabelFrequencyCap, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from .types.ad_unit_size import AdUnitSize from .types.admanager_error import AdManagerError from .types.applied_label import AppliedLabel from .types.company_credit_status_enum import CompanyCreditStatusEnum +from .types.company_messages import Company from .types.company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from .types.company_type_enum import CompanyTypeEnum -from .types.computed_status_enum import ComputedStatusEnum -from .types.contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from .types.creative_placeholder import CreativePlaceholder -from .types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from .types.contact_messages import Contact from .types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) +from .types.custom_field_messages import CustomField, CustomFieldOption from .types.custom_field_service import ( - CustomField, - CustomFieldOption, GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from .types.custom_field_value import CustomFieldValue from .types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) +from .types.custom_targeting_key_messages import CustomTargetingKey from .types.custom_targeting_key_service import ( - CustomTargetingKey, GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -106,103 +79,102 @@ CustomTargetingValueMatchTypeEnum, 
CustomTargetingValueStatusEnum, ) +from .types.custom_targeting_value_messages import CustomTargetingValue from .types.custom_targeting_value_service import ( - CustomTargetingValue, GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from .types.environment_type_enum import EnvironmentTypeEnum -from .types.frequency_cap import FrequencyCap, TimeUnitEnum -from .types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from .types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, -) -from .types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from .types.entity_signals_mapping_messages import EntitySignalsMapping +from .types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from .types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, -) -from .types.network_service import GetNetworkRequest, Network -from .types.order_service import ( - GetOrderRequest, - ListOrdersRequest, - ListOrdersResponse, - Order, +from .types.environment_type_enum import EnvironmentTypeEnum +from .types.frequency_cap import FrequencyCap +from .types.label_messages import Label +from .types.network_messages import Network +from .types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) +from .types.order_enums import OrderStatusEnum +from .types.order_messages import Order +from .types.order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse from .types.placement_enums import PlacementStatusEnum +from .types.placement_messages import Placement from .types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from .types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) -from .types.role_service import ( - GetRoleRequest, - ListRolesRequest, - ListRolesResponse, - Role, -) -from .types.size import Size, SizeTypeEnum -from .types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, -) -from .types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from .types.role_enums import RoleStatusEnum +from .types.role_messages import Role +from .types.role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse +from .types.size import Size +from .types.size_type_enum import SizeTypeEnum +from .types.taxonomy_category_messages import TaxonomyCategory +from .types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from .types.taxonomy_type_enum import TaxonomyTypeEnum +from 
.types.team_messages import Team +from .types.time_unit_enum import TimeUnitEnum +from .types.user_messages import User +from .types.user_service import GetUserRequest __all__ = ( "AdManagerError", - "AdPartner", - "AdPartnerDeclaration", - "AdPartnerServiceClient", "AdUnit", "AdUnitParent", "AdUnitServiceClient", "AdUnitSize", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", "AppliedLabel", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", "Company", "CompanyCreditStatusEnum", "CompanyServiceClient", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "ContactServiceClient", - "Creative", - "CreativePlaceholder", - "CreativeRotationTypeEnum", - "CreativeServiceClient", + "CreateEntitySignalsMappingRequest", + "CreateReportRequest", "CustomField", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldOption", "CustomFieldServiceClient", "CustomFieldStatusEnum", + "CustomFieldValue", "CustomFieldVisibilityEnum", "CustomTargetingKey", "CustomTargetingKeyReportableTypeEnum", @@ -213,89 +185,83 @@ "CustomTargetingValueMatchTypeEnum", "CustomTargetingValueServiceClient", "CustomTargetingValueStatusEnum", - "DeclarationTypeEnum", - "DeliveryRateTypeEnum", + "EntitySignalsMapping", + "EntitySignalsMappingServiceClient", "EnvironmentTypeEnum", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", "FrequencyCap", - "GetAdPartnerRequest", "GetAdUnitRequest", "GetCompanyRequest", - "GetContactRequest", - "GetCreativeRequest", "GetCustomFieldRequest", "GetCustomTargetingKeyRequest", "GetCustomTargetingValueRequest", - "GetLabelRequest", - "GetLineItemRequest", + "GetEntitySignalsMappingRequest", "GetNetworkRequest", "GetOrderRequest", "GetPlacementRequest", + "GetReportRequest", "GetRoleRequest", - "GetTeamRequest", + "GetTaxonomyCategoryRequest", "GetUserRequest", - "Goal", - "GoalTypeEnum", "Label", "LabelFrequencyCap", - "LabelServiceClient", - "LineItem", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemServiceClient", - "LineItemTypeEnum", - "ListAdPartnersRequest", - "ListAdPartnersResponse", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", "ListCompaniesRequest", "ListCompaniesResponse", - "ListContactsRequest", - "ListContactsResponse", - "ListCreativesRequest", - "ListCreativesResponse", "ListCustomFieldsRequest", "ListCustomFieldsResponse", "ListCustomTargetingKeysRequest", "ListCustomTargetingKeysResponse", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", - "ListLabelsRequest", - "ListLabelsResponse", - "ListLineItemsRequest", - "ListLineItemsResponse", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "ListNetworksRequest", + "ListNetworksResponse", "ListOrdersRequest", "ListOrdersResponse", "ListPlacementsRequest", "ListPlacementsResponse", + "ListReportsRequest", + "ListReportsResponse", "ListRolesRequest", "ListRolesResponse", - "ListTeamsRequest", - "ListTeamsResponse", - "ListUsersRequest", - "ListUsersResponse", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", "Network", "NetworkServiceClient", "Order", "OrderServiceClient", + "OrderStatusEnum", "Placement", "PlacementServiceClient", "PlacementStatusEnum", "Report", + "ReportDefinition", "ReportServiceClient", - 
"ReservationStatusEnum", "Role", "RoleServiceClient", + "RoleStatusEnum", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", "Size", "SizeTypeEnum", "SmartSizeModeEnum", "TargetWindowEnum", + "TaxonomyCategory", + "TaxonomyCategoryServiceClient", + "TaxonomyTypeEnum", "Team", - "TeamServiceClient", "TimeUnitEnum", - "UnitTypeEnum", + "UpdateEntitySignalsMappingRequest", + "UpdateReportRequest", "User", "UserServiceClient", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json index 67680096a5d9..aa173a3cf11e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json @@ -5,25 +5,6 @@ "protoPackage": "google.ads.admanager.v1", "schema": "1.0", "services": { - "AdPartnerService": { - "clients": { - "rest": { - "libraryClient": "AdPartnerServiceClient", - "rpcs": { - "GetAdPartner": { - "methods": [ - "get_ad_partner" - ] - }, - "ListAdPartners": { - "methods": [ - "list_ad_partners" - ] - } - } - } - } - }, "AdUnitService": { "clients": { "rest": { @@ -34,6 +15,11 @@ "get_ad_unit" ] }, + "ListAdUnitSizes": { + "methods": [ + "list_ad_unit_sizes" + ] + }, "ListAdUnits": { "methods": [ "list_ad_units" @@ -62,44 +48,6 @@ } } }, - "ContactService": { - "clients": { - "rest": { - "libraryClient": "ContactServiceClient", - "rpcs": { - "GetContact": { - "methods": [ - "get_contact" - ] - }, - "ListContacts": { - "methods": [ - "list_contacts" - ] - } - } - } - } - }, - "CreativeService": { - "clients": { - "rest": { - "libraryClient": "CreativeServiceClient", - "rpcs": { - "GetCreative": { - "methods": [ - "get_creative" - ] - }, - "ListCreatives": { - "methods": [ - "list_creatives" - ] - } - } - } - } - }, "CustomFieldService": { "clients": { "rest": { @@ -157,38 +105,39 @@ } } }, - "LabelService": { + "EntitySignalsMappingService": { "clients": { "rest": { - "libraryClient": "LabelServiceClient", + "libraryClient": "EntitySignalsMappingServiceClient", "rpcs": { - "GetLabel": { + "BatchCreateEntitySignalsMappings": { "methods": [ - "get_label" + "batch_create_entity_signals_mappings" ] }, - "ListLabels": { + "BatchUpdateEntitySignalsMappings": { "methods": [ - "list_labels" + "batch_update_entity_signals_mappings" ] - } - } - } - } - }, - "LineItemService": { - "clients": { - "rest": { - "libraryClient": "LineItemServiceClient", - "rpcs": { - "GetLineItem": { + }, + "CreateEntitySignalsMapping": { + "methods": [ + "create_entity_signals_mapping" + ] + }, + "GetEntitySignalsMapping": { "methods": [ - "get_line_item" + "get_entity_signals_mapping" ] }, - "ListLineItems": { + "ListEntitySignalsMappings": { "methods": [ - "list_line_items" + "list_entity_signals_mappings" + ] + }, + "UpdateEntitySignalsMapping": { + "methods": [ + "update_entity_signals_mapping" ] } } @@ -204,6 +153,11 @@ "methods": [ "get_network" ] + }, + "ListNetworks": { + "methods": [ + "list_networks" + ] } } } @@ -252,9 +206,34 @@ "rest": { "libraryClient": "ReportServiceClient", "rpcs": { - "ExportSavedReport": { + "CreateReport": { + "methods": [ + "create_report" + ] + }, + "FetchReportResultRows": { + "methods": [ + "fetch_report_result_rows" + ] + }, + "GetReport": { + "methods": [ + "get_report" + ] + }, + "ListReports": { "methods": [ - "export_saved_report" + "list_reports" + ] + }, + "RunReport": { + "methods": [ + "run_report" + ] + }, + 
"UpdateReport": { + "methods": [ + "update_report" ] } } @@ -280,19 +259,19 @@ } } }, - "TeamService": { + "TaxonomyCategoryService": { "clients": { "rest": { - "libraryClient": "TeamServiceClient", + "libraryClient": "TaxonomyCategoryServiceClient", "rpcs": { - "GetTeam": { + "GetTaxonomyCategory": { "methods": [ - "get_team" + "get_taxonomy_category" ] }, - "ListTeams": { + "ListTaxonomyCategories": { "methods": [ - "list_teams" + "list_taxonomy_categories" ] } } @@ -308,11 +287,6 @@ "methods": [ "get_user" ] - }, - "ListUsers": { - "methods": [ - "list_users" - ] } } } diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py deleted file mode 100644 index 7a88b4ec84e4..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AdPartnerServiceTransport -from .rest import AdPartnerServiceRestInterceptor, AdPartnerServiceRestTransport - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AdPartnerServiceTransport]] -_transport_registry["rest"] = AdPartnerServiceRestTransport - -__all__ = ( - "AdPartnerServiceTransport", - "AdPartnerServiceRestTransport", - "AdPartnerServiceRestInterceptor", -) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py index f1200c52e05a..c8c6cb4564df 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py @@ -55,8 +55,8 @@ from google.ads.admanager_v1.services.ad_unit_service import pagers from google.ads.admanager_v1.types import ( ad_unit_enums, + ad_unit_messages, ad_unit_service, - ad_unit_size, applied_label, ) @@ -732,7 +732,7 @@ def get_ad_unit( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""API to retrieve an AdUnit object. .. code-block:: python @@ -942,6 +942,124 @@ def sample_list_ad_units(): # Done; return the response. return response + def list_ad_unit_sizes( + self, + request: Optional[Union[ad_unit_service.ListAdUnitSizesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdUnitSizesPager: + r"""API to retrieve a list of AdUnitSize objects. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_ad_unit_sizes(): + # Create a client + client = admanager_v1.AdUnitServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListAdUnitSizesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ad_unit_sizes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListAdUnitSizesRequest, dict]): + The request object. Request object for ListAdUnitSizes + method. + parent (str): + Required. The parent, which owns this collection of + AdUnitSizes. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.services.ad_unit_service.pagers.ListAdUnitSizesPager: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. Iterating over + this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, ad_unit_service.ListAdUnitSizesRequest): + request = ad_unit_service.ListAdUnitSizesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_ad_unit_sizes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdUnitSizesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AdUnitServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py index 2ae957b1dea0..ebf38bec7995 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service class ListAdUnitsPager: @@ -107,9 +107,83 @@ def pages(self) -> Iterator[ad_unit_service.ListAdUnitsResponse]: ) yield self._response - def __iter__(self) -> Iterator[ad_unit_service.AdUnit]: + def __iter__(self) -> Iterator[ad_unit_messages.AdUnit]: for page in self.pages: yield from page.ad_units def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdUnitSizesPager: + """A pager for iterating through ``list_ad_unit_sizes`` requests. + + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``ad_unit_sizes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAdUnitSizes`` requests and continue to iterate + through the ``ad_unit_sizes`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., ad_unit_service.ListAdUnitSizesResponse], + request: ad_unit_service.ListAdUnitSizesRequest, + response: ad_unit_service.ListAdUnitSizesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.ListAdUnitSizesRequest): + The initial request object. + response (google.ads.admanager_v1.types.ListAdUnitSizesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ad_unit_service.ListAdUnitSizesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[ad_unit_service.ListAdUnitSizesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[ad_unit_messages.AdUnitSize]: + for page in self.pages: + yield from page.ad_unit_sizes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py index 948cad87abb1..7852b164a55c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_ad_unit_sizes: gapic_v1.method.wrap_method( + self.list_ad_unit_sizes, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -155,7 +160,7 @@ def get_ad_unit( self, ) -> Callable[ [ad_unit_service.GetAdUnitRequest], - Union[ad_unit_service.AdUnit, Awaitable[ad_unit_service.AdUnit]], + Union[ad_unit_messages.AdUnit, Awaitable[ad_unit_messages.AdUnit]], ]: raise NotImplementedError() @@ -171,6 +176,18 @@ def list_ad_units( ]: raise NotImplementedError() + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + Union[ + ad_unit_service.ListAdUnitSizesResponse, + Awaitable[ad_unit_service.ListAdUnitSizesResponse], + ], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py index c6dd9d86e533..2c1ecebf5b66 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service from .base import AdUnitServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -81,6 +81,14 @@ def post_list_ad_units(self, response): logging.log(f"Received response: {response}") return response + def pre_list_ad_unit_sizes(self, request, 
metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_ad_unit_sizes(self, response): + logging.log(f"Received response: {response}") + return response + transport = AdUnitServiceRestTransport(interceptor=MyCustomAdUnitServiceInterceptor()) client = AdUnitServiceClient(transport=transport) @@ -100,8 +108,8 @@ def pre_get_ad_unit( return request, metadata def post_get_ad_unit( - self, response: ad_unit_service.AdUnit - ) -> ad_unit_service.AdUnit: + self, response: ad_unit_messages.AdUnit + ) -> ad_unit_messages.AdUnit: """Post-rpc interceptor for get_ad_unit Override in a subclass to manipulate the response @@ -133,6 +141,29 @@ def post_list_ad_units( """ return response + def pre_list_ad_unit_sizes( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ad_unit_service.ListAdUnitSizesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the request or metadata + before they are sent to the AdUnitService server. + """ + return request, metadata + + def post_list_ad_unit_sizes( + self, response: ad_unit_service.ListAdUnitSizesResponse + ) -> ad_unit_service.ListAdUnitSizesResponse: + """Post-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the response + after it is returned by the AdUnitService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -274,7 +305,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""Call the get ad unit method over HTTP. Args: @@ -287,7 +318,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_unit_service.AdUnit: + ~.ad_unit_messages.AdUnit: The AdUnit resource. """ @@ -331,8 +362,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_unit_service.AdUnit() - pb_resp = ad_unit_service.AdUnit.pb(resp) + resp = ad_unit_messages.AdUnit() + pb_resp = ad_unit_messages.AdUnit.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ad_unit(resp) @@ -427,10 +458,101 @@ def __call__( resp = self._interceptor.post_list_ad_units(resp) return resp + class _ListAdUnitSizes(AdUnitServiceRestStub): + def __hash__(self): + return hash("ListAdUnitSizes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ad_unit_service.ListAdUnitSizesResponse: + r"""Call the list ad unit sizes method over HTTP. + + Args: + request (~.ad_unit_service.ListAdUnitSizesRequest): + The request object. Request object for ListAdUnitSizes + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.ad_unit_service.ListAdUnitSizesResponse: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/adUnitSizes", + }, + ] + request, metadata = self._interceptor.pre_list_ad_unit_sizes( + request, metadata + ) + pb_request = ad_unit_service.ListAdUnitSizesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ad_unit_service.ListAdUnitSizesResponse() + pb_resp = ad_unit_service.ListAdUnitSizesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_ad_unit_sizes(resp) + return resp + @property def get_ad_unit( self, - ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_service.AdUnit]: + ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_messages.AdUnit]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetAdUnit(self._session, self._host, self._interceptor) # type: ignore @@ -445,6 +567,17 @@ def list_ad_units( # In C++ this would require a dynamic_cast return self._ListAdUnits(self._session, self._host, self._interceptor) # type: ignore + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + ad_unit_service.ListAdUnitSizesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAdUnitSizes(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -476,11 +609,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py index a1bc3d1c6eab..85b49f138017 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py @@ -49,11 +49,13 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.company_service import pagers from google.ads.admanager_v1.types import ( applied_label, company_credit_status_enum, + company_messages, company_service, company_type_enum, ) @@ -753,7 +755,7 @@ def get_company( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> company_service.Company: + ) -> company_messages.Company: r"""API to retrieve a ``Company`` object. .. code-block:: python @@ -902,7 +904,7 @@ def sample_list_companies(): Returns: google.ads.admanager_v1.services.company_service.pagers.ListCompaniesPager: Response object for ListCompaniesRequest containing matching Company - resources. + objects. Iterating over this object will yield results and resolve additional pages automatically. 
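For context, the hunks above add a new ``AdUnitService.ListAdUnitSizes`` RPC, its ``ListAdUnitSizesPager``, and the REST transport plumbing for it. Below is a minimal usage sketch of that surface, assuming only the generated sample and pager semantics shown in this diff; the helper function name and the network code are placeholders, not part of the generated library.

.. code-block:: python

    # Illustrative sketch of the newly generated ListAdUnitSizes surface.
    # "123456" is a placeholder network code; real calls need valid credentials.
    from google.ads import admanager_v1


    def print_ad_unit_sizes(network_code: str = "123456") -> None:
        client = admanager_v1.AdUnitServiceClient()
        request = admanager_v1.ListAdUnitSizesRequest(
            parent=f"networks/{network_code}",
        )

        # list_ad_unit_sizes returns a ListAdUnitSizesPager; iterating it yields
        # AdUnitSize messages and issues further ListAdUnitSizes requests on demand.
        pager = client.list_ad_unit_sizes(request=request)
        for ad_unit_size in pager:
            print(ad_unit_size)

        # Per-page access is also available through the pager's `pages` property,
        # which yields ListAdUnitSizesResponse objects.

As with the other pagers in this package, only the most recent ``ListAdUnitSizesResponse`` is retained for attribute lookup on the pager.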
diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py index 8dd003e78650..7a1c65b16259 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service class ListCompaniesPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[company_service.ListCompaniesResponse]: ) yield self._response - def __iter__(self) -> Iterator[company_service.Company]: + def __iter__(self) -> Iterator[company_messages.Company]: for page in self.pages: yield from page.companies diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py index 0415f3d70be7..3304a05b29c2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_company( self, ) -> Callable[ [company_service.GetCompanyRequest], - Union[company_service.Company, Awaitable[company_service.Company]], + Union[company_messages.Company, Awaitable[company_messages.Company]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py index 3692c88b6fde..604ec04faf5a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service from .base import CompanyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_company( return request, metadata def post_get_company( - self, response: company_service.Company - ) -> company_service.Company: + self, response: company_messages.Company + ) -> company_messages.Company: """Post-rpc interceptor for get_company Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> company_service.Company: + ) -> company_messages.Company: r"""Call the get company method over HTTP. 
Args: @@ -287,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.company_service.Company: + ~.company_messages.Company: The ``Company`` resource. """ @@ -331,8 +331,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = company_service.Company() - pb_resp = company_service.Company.pb(resp) + resp = company_messages.Company() + pb_resp = company_messages.Company.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_company(resp) @@ -374,7 +374,7 @@ def __call__( Returns: ~.company_service.ListCompaniesResponse: Response object for ``ListCompaniesRequest`` containing - matching ``Company`` resources. + matching ``Company`` objects. """ @@ -428,7 +428,7 @@ def __call__( @property def get_company( self, - ) -> Callable[[company_service.GetCompanyRequest], company_service.Company]: + ) -> Callable[[company_service.GetCompanyRequest], company_messages.Company]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetCompany(self._session, self._host, self._interceptor) # type: ignore @@ -474,11 +474,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py deleted file mode 100644 index 20eee0424097..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import ContactServiceClient - -__all__ = ("ContactServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py deleted file mode 100644 index 46d892852e64..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/client.py +++ /dev/null @@ -1,986 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import ( - Callable, - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) -import warnings - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.services.contact_service import pagers -from google.ads.admanager_v1.types import contact_service - -from .transports.base import DEFAULT_CLIENT_INFO, ContactServiceTransport -from .transports.rest import ContactServiceRestTransport - - -class ContactServiceClientMeta(type): - """Metaclass for the ContactService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = ( - OrderedDict() - ) # type: Dict[str, Type[ContactServiceTransport]] - _transport_registry["rest"] = ContactServiceRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[ContactServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ContactServiceClient(metaclass=ContactServiceClientMeta): - """Provides methods for handling Contact objects.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = "admanager.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContactServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContactServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ContactServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ContactServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def contact_path( - network_code: str, - contact: str, - ) -> str: - """Returns a fully-qualified contact string.""" - return "networks/{network_code}/contacts/{contact}".format( - network_code=network_code, - contact=contact, - ) - - @staticmethod - def parse_contact_path(path: str) -> Dict[str, str]: - """Parses a contact path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/contacts/(?P.+?)$", path - ) - return m.groupdict() if m else {} - - @staticmethod - def network_path( - network_code: str, - ) -> str: - """Returns a fully-qualified network string.""" - return "networks/{network_code}".format( - network_code=network_code, - ) - - @staticmethod - def parse_network_path(path: str) -> Dict[str, str]: - """Parses a network path into its component segments.""" - m = re.match(r"^networks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a 
fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn( - "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning, - ) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. 
- client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint( - api_override, client_cert_source, universe_domain, use_mtls_endpoint - ): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - _default_universe = ContactServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError( - f"mTLS is not supported in any universe other than {_default_universe}." 
- ) - api_endpoint = ContactServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ContactServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain - ) - return api_endpoint - - @staticmethod - def _get_universe_domain( - client_universe_domain: Optional[str], universe_domain_env: Optional[str] - ) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = ContactServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ContactServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ContactServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, ContactServiceTransport, Callable[..., ContactServiceTransport]] - ] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the contact service client. 
- - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ContactServiceTransport,Callable[..., ContactServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ContactServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast( - client_options_lib.ClientOptions, self._client_options - ) - - universe_domain_opt = getattr(self._client_options, "universe_domain", None) - - ( - self._use_client_cert, - self._use_mtls_endpoint, - self._universe_domain_env, - ) = ContactServiceClient._read_environment_variables() - self._client_cert_source = ContactServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert - ) - self._universe_domain = ContactServiceClient._get_universe_domain( - universe_domain_opt, self._universe_domain_env - ) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. 
- self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, ContactServiceTransport) - if transport_provided: - # transport is a ContactServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(ContactServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = ( - self._api_endpoint - or ContactServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint, - ) - ) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - transport_init: Union[ - Type[ContactServiceTransport], Callable[..., ContactServiceTransport] - ] = ( - ContactServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., ContactServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def get_contact( - self, - request: Optional[Union[contact_service.GetContactRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.Contact: - r"""API to retrieve a Contact object. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 - - def sample_get_contact(): - # Create a client - client = admanager_v1.ContactServiceClient() - - # Initialize request argument(s) - request = admanager_v1.GetContactRequest( - name="name_value", - ) - - # Make the request - response = client.get_contact(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.ads.admanager_v1.types.GetContactRequest, dict]): - The request object. Request object for GetContact method. - name (str): - Required. 
The resource name of the Contact. Format: - ``networks/{network_code}/contacts/{contact_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.ads.admanager_v1.types.Contact: - The Contact resource. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, contact_service.GetContactRequest): - request = contact_service.GetContactRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_contact] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_contacts( - self, - request: Optional[Union[contact_service.ListContactsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListContactsPager: - r"""API to retrieve a list of Contact objects. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 - - def sample_list_contacts(): - # Create a client - client = admanager_v1.ContactServiceClient() - - # Initialize request argument(s) - request = admanager_v1.ListContactsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_contacts(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.ads.admanager_v1.types.ListContactsRequest, dict]): - The request object. Request object for ListContacts - method. - parent (str): - Required. The parent, which owns this collection of - Contacts. Format: ``networks/{network_code}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.ads.admanager_v1.services.contact_service.pagers.ListContactsPager: - Response object for - ListContactsRequest containing matching - Contact resources. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, contact_service.ListContactsRequest): - request = contact_service.ListContactsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_contacts] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListContactsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ContactServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("ContactServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py deleted file mode 100644 index 30f2279d1f01..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import contact_service - - -class ListContactsPager: - """A pager for iterating through ``list_contacts`` requests. - - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListContactsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``contacts`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListContacts`` requests and continue to iterate - through the ``contacts`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListContactsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., contact_service.ListContactsResponse], - request: contact_service.ListContactsRequest, - response: contact_service.ListContactsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListContactsRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListContactsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = contact_service.ListContactsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[contact_service.ListContactsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[contact_service.Contact]: - for page in self.pages: - yield from page.contacts - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py deleted file mode 100644 index 4dde7a60bd0f..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ContactServiceTransport -from .rest import ContactServiceRestInterceptor, ContactServiceRestTransport - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[ContactServiceTransport]] -_transport_registry["rest"] = ContactServiceRestTransport - -__all__ = ( - "ContactServiceTransport", - "ContactServiceRestTransport", - "ContactServiceRestInterceptor", -) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py deleted file mode 100644 index c6cbbecff076..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/base.py +++ /dev/null @@ -1,188 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import contact_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -class ContactServiceTransport(abc.ABC): - """Abstract transport class for ContactService.""" - - AUTH_SCOPES = () - - DEFAULT_HOST: str = "admanager.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_contact: gapic_v1.method.wrap_method( - self.get_contact, - default_timeout=None, - client_info=client_info, - ), - self.list_contacts: gapic_v1.method.wrap_method( - self.list_contacts, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def get_contact( - self, - ) -> Callable[ - [contact_service.GetContactRequest], - Union[contact_service.Contact, Awaitable[contact_service.Contact]], - ]: - raise NotImplementedError() - - @property - def list_contacts( - self, - ) -> Callable[ - [contact_service.ListContactsRequest], - Union[ - contact_service.ListContactsResponse, - Awaitable[contact_service.ListContactsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("ContactServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py deleted file mode 100644 index 52a4f962b295..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py +++ /dev/null @@ -1,526 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import contact_service - -from .base import ContactServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class ContactServiceRestInterceptor: - """Interceptor for ContactService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ContactServiceRestTransport. - - .. code-block:: python - class MyCustomContactServiceInterceptor(ContactServiceRestInterceptor): - def pre_get_contact(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_contact(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_contacts(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_contacts(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ContactServiceRestTransport(interceptor=MyCustomContactServiceInterceptor()) - client = ContactServiceClient(transport=transport) - - - """ - - def pre_get_contact( - self, - request: contact_service.GetContactRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.GetContactRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_contact - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_contact( - self, response: contact_service.Contact - ) -> contact_service.Contact: - """Post-rpc interceptor for get_contact - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - def pre_list_contacts( - self, - request: contact_service.ListContactsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.ListContactsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_contacts - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_list_contacts( - self, response: contact_service.ListContactsResponse - ) -> contact_service.ListContactsResponse: - """Post-rpc interceptor for list_contacts - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ContactServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ContactServiceRestInterceptor - - -class ContactServiceRestTransport(ContactServiceTransport): - """REST backend transport for ContactService. - - Provides methods for handling Contact objects. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[ContactServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(
-                f"Unexpected hostname structure: {host}"
-            )  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST
-        )
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or ContactServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _GetContact(ContactServiceRestStub):
-        def __hash__(self):
-            return hash("GetContact")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {
-                k: v
-                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
-                if k not in message_dict
-            }
-
-        def __call__(
-            self,
-            request: contact_service.GetContactRequest,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Optional[float] = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-        ) -> contact_service.Contact:
-            r"""Call the get contact method over HTTP.
-
-            Args:
-                request (~.contact_service.GetContactRequest):
-                    The request object. Request object for GetContact method.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.contact_service.Contact:
-                    The Contact resource.
-            """
-
-            http_options: List[Dict[str, str]] = [
-                {
-                    "method": "get",
-                    "uri": "/v1/{name=networks/*/contacts/*}",
-                },
-            ]
-            request, metadata = self._interceptor.pre_get_contact(request, metadata)
-            pb_request = contact_service.GetContactRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-
-            uri = transcoded_request["uri"]
-            method = transcoded_request["method"]
-
-            # Jsonify the query params
-            query_params = json.loads(
-                json_format.MessageToJson(
-                    transcoded_request["query_params"],
-                    use_integers_for_enums=True,
-                )
-            )
-            query_params.update(self._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-
-            # Send the request
-            headers = dict(metadata)
-            headers["Content-Type"] = "application/json"
-            response = getattr(self._session, method)(
-                "{host}{uri}".format(host=self._host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
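The ``_GetContact`` stub above delegates URL construction to ``google.api_core.path_template.transcode``, which binds the request message onto the annotated HTTP rule. A rough sketch of that step in isolation, using a hypothetical contact name purely for illustration:

.. code-block:: python

    from google.api_core import path_template

    from google.ads.admanager_v1.types import contact_service

    http_options = [{"method": "get", "uri": "/v1/{name=networks/*/contacts/*}"}]
    pb_request = contact_service.GetContactRequest.pb(
        contact_service.GetContactRequest(name="networks/123456/contacts/789")
    )

    transcoded = path_template.transcode(http_options, pb_request)
    # transcoded["method"] -> "get"
    # transcoded["uri"]    -> "/v1/networks/123456/contacts/789"
    # Fields consumed by the path pattern are cleared from
    # transcoded["query_params"], which the stub then serializes to JSON.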
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.Contact() - pb_resp = contact_service.Contact.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_contact(resp) - return resp - - class _ListContacts(ContactServiceRestStub): - def __hash__(self): - return hash("ListContacts") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: contact_service.ListContactsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.ListContactsResponse: - r"""Call the list contacts method over HTTP. - - Args: - request (~.contact_service.ListContactsRequest): - The request object. Request object for ListContacts - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.contact_service.ListContactsResponse: - Response object for - ListContactsRequest containing matching - Contact resources. - - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/contacts", - }, - ] - request, metadata = self._interceptor.pre_list_contacts(request, metadata) - pb_request = contact_service.ListContactsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.ListContactsResponse() - pb_resp = contact_service.ListContactsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_contacts(resp) - return resp - - @property - def get_contact( - self, - ) -> Callable[[contact_service.GetContactRequest], contact_service.Contact]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetContact(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_contacts( - self, - ) -> Callable[ - [contact_service.ListContactsRequest], contact_service.ListContactsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListContacts(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(ContactServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("ContactServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py deleted file mode 100644 index 33aa7085f6e5..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py +++ /dev/null @@ -1,527 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import creative_service - -from .base import CreativeServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CreativeServiceRestInterceptor: - """Interceptor for CreativeService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CreativeServiceRestTransport. - - .. code-block:: python - class MyCustomCreativeServiceInterceptor(CreativeServiceRestInterceptor): - def pre_get_creative(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_creative(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_creatives(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_creatives(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CreativeServiceRestTransport(interceptor=MyCustomCreativeServiceInterceptor()) - client = CreativeServiceClient(transport=transport) - - - """ - - def pre_get_creative( - self, - request: creative_service.GetCreativeRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.GetCreativeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_creative - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. 
- """ - return request, metadata - - def post_get_creative( - self, response: creative_service.Creative - ) -> creative_service.Creative: - """Post-rpc interceptor for get_creative - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_list_creatives( - self, - request: creative_service.ListCreativesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.ListCreativesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_creatives - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_list_creatives( - self, response: creative_service.ListCreativesResponse - ) -> creative_service.ListCreativesResponse: - """Post-rpc interceptor for list_creatives - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CreativeServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CreativeServiceRestInterceptor - - -class CreativeServiceRestTransport(CreativeServiceTransport): - """REST backend transport for CreativeService. - - Provides methods for handling Creative objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CreativeServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional(Sequence[str])): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(
-                f"Unexpected hostname structure: {host}"
-            )  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST
-        )
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or CreativeServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _GetCreative(CreativeServiceRestStub):
-        def __hash__(self):
-            return hash("GetCreative")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {
-                k: v
-                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
-                if k not in message_dict
-            }
-
-        def __call__(
-            self,
-            request: creative_service.GetCreativeRequest,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Optional[float] = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-        ) -> creative_service.Creative:
-            r"""Call the get creative method over HTTP.
-
-            Args:
-                request (~.creative_service.GetCreativeRequest):
-                    The request object. Request object for GetCreative
-                    method.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.creative_service.Creative:
-                    The Creative resource.
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/creatives/*}", - }, - ] - request, metadata = self._interceptor.pre_get_creative(request, metadata) - pb_request = creative_service.GetCreativeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.Creative() - pb_resp = creative_service.Creative.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_creative(resp) - return resp - - class _ListCreatives(CreativeServiceRestStub): - def __hash__(self): - return hash("ListCreatives") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: creative_service.ListCreativesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.ListCreativesResponse: - r"""Call the list creatives method over HTTP. - - Args: - request (~.creative_service.ListCreativesRequest): - The request object. Request object for ListCreatives - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.creative_service.ListCreativesResponse: - Response object for - ListCreativesRequest containing matching - Creative resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/creatives", - }, - ] - request, metadata = self._interceptor.pre_list_creatives(request, metadata) - pb_request = creative_service.ListCreativesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.ListCreativesResponse() - pb_resp = creative_service.ListCreativesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_creatives(resp) - return resp - - @property - def get_creative( - self, - ) -> Callable[[creative_service.GetCreativeRequest], creative_service.Creative]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetCreative(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_creatives( - self, - ) -> Callable[ - [creative_service.ListCreativesRequest], creative_service.ListCreativesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCreatives(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(CreativeServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("CreativeServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py index 4b1cd58b89f4..986a135d17c5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py @@ -51,7 +51,11 @@ from google.longrunning import operations_pb2 # type: ignore from google.ads.admanager_v1.services.custom_field_service import pagers -from google.ads.admanager_v1.types import custom_field_enums, custom_field_service +from google.ads.admanager_v1.types import ( + custom_field_enums, + custom_field_messages, + custom_field_service, +) from .transports.base import DEFAULT_CLIENT_INFO, CustomFieldServiceTransport from .transports.rest import CustomFieldServiceRestTransport @@ -702,7 +706,7 @@ def get_custom_field( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""API to retrieve a ``CustomField`` object. .. code-block:: python @@ -749,7 +753,9 @@ def sample_get_custom_field(): Returns: google.ads.admanager_v1.types.CustomField: - The CustomField resource. + An additional, user-created field on + an entity. + """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py index f0a4e63f1c52..b11c6be336cc 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service class ListCustomFieldsPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[custom_field_service.ListCustomFieldsResponse]: ) yield self._response - def __iter__(self) -> Iterator[custom_field_service.CustomField]: + def __iter__(self) -> Iterator[custom_field_messages.CustomField]: for page in self.pages: yield from page.custom_fields diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py index 3578065cdf3f..97f76ac909c3 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +156,8 @@ def get_custom_field( ) -> Callable[ [custom_field_service.GetCustomFieldRequest], Union[ - custom_field_service.CustomField, - Awaitable[custom_field_service.CustomField], + custom_field_messages.CustomField, + Awaitable[custom_field_messages.CustomField], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py index 0da154858e92..4994a3e75121 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service from .base import CustomFieldServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_custom_field( return request, metadata def post_get_custom_field( - self, response: custom_field_service.CustomField - ) -> custom_field_service.CustomField: + self, response: custom_field_messages.CustomField + ) -> custom_field_messages.CustomField: """Post-rpc interceptor for get_custom_field Override in a subclass to manipulate the response @@ 
-274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""Call the get custom field method over HTTP. Args: @@ -287,8 +287,10 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_field_service.CustomField: - The ``CustomField`` resource. + ~.custom_field_messages.CustomField: + An additional, user-created field on + an entity. + """ http_options: List[Dict[str, str]] = [ @@ -333,8 +335,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_field_service.CustomField() - pb_resp = custom_field_service.CustomField.pb(resp) + resp = custom_field_messages.CustomField() + pb_resp = custom_field_messages.CustomField.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_field(resp) @@ -433,7 +435,7 @@ def __call__( def get_custom_field( self, ) -> Callable[ - [custom_field_service.GetCustomFieldRequest], custom_field_service.CustomField + [custom_field_service.GetCustomFieldRequest], custom_field_messages.CustomField ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast @@ -481,11 +483,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py index 53ee2f5439d2..63992825ffe4 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_key_service import pagers from google.ads.admanager_v1.types import ( custom_targeting_key_enums, + custom_targeting_key_messages, custom_targeting_key_service, ) @@ -712,7 +713,7 @@ def get_custom_targeting_key( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""API to retrieve a ``CustomTargetingKey`` object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py index e56ff58da48c..88953ea7950c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) class ListCustomTargetingKeysPager: @@ -113,7 +116,7 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_key_service.CustomTargetingKey]: + def __iter__(self) -> Iterator[custom_targeting_key_messages.CustomTargetingKey]: for page in self.pages: yield from page.custom_targeting_keys diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py index a55f7a97d634..7e4925dd049f 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_key( ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], Union[ - custom_targeting_key_service.CustomTargetingKey, - Awaitable[custom_targeting_key_service.CustomTargetingKey], + custom_targeting_key_messages.CustomTargetingKey, + Awaitable[custom_targeting_key_messages.CustomTargetingKey], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py index 5ea81bb49e6c..6b9540dc0b60 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) from .base import CustomTargetingKeyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def pre_get_custom_targeting_key( return request, metadata def post_get_custom_targeting_key( - self, response: 
custom_targeting_key_service.CustomTargetingKey - ) -> custom_targeting_key_service.CustomTargetingKey: + self, response: custom_targeting_key_messages.CustomTargetingKey + ) -> custom_targeting_key_messages.CustomTargetingKey: """Post-rpc interceptor for get_custom_targeting_key Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""Call the get custom targeting key method over HTTP. Args: @@ -293,7 +296,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_key_service.CustomTargetingKey: + ~.custom_targeting_key_messages.CustomTargetingKey: The ``CustomTargetingKey`` resource. """ @@ -341,8 +344,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_key_service.CustomTargetingKey() - pb_resp = custom_targeting_key_service.CustomTargetingKey.pb(resp) + resp = custom_targeting_key_messages.CustomTargetingKey() + pb_resp = custom_targeting_key_messages.CustomTargetingKey.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_key(resp) @@ -447,7 +450,7 @@ def get_custom_targeting_key( self, ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], - custom_targeting_key_service.CustomTargetingKey, + custom_targeting_key_messages.CustomTargetingKey, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast @@ -495,11 +498,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py index 2a1a0435b1c9..6c03f1fb4c53 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_value_service import pagers from google.ads.admanager_v1.types import ( custom_targeting_value_enums, + custom_targeting_value_messages, custom_targeting_value_service, ) @@ -723,7 +724,7 @@ def get_custom_targeting_value( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""API to retrieve a ``CustomTargetingValue`` object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py index 09ef836cdb72..214d53becdec 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) class ListCustomTargetingValuesPager: @@ -113,7 +116,9 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_value_service.CustomTargetingValue]: + def __iter__( + self, + ) -> Iterator[custom_targeting_value_messages.CustomTargetingValue]: for page in self.pages: yield from page.custom_targeting_values diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py index a9d002f465f7..0ed99f654001 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_value( ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], Union[ - custom_targeting_value_service.CustomTargetingValue, - Awaitable[custom_targeting_value_service.CustomTargetingValue], + custom_targeting_value_messages.CustomTargetingValue, + Awaitable[custom_targeting_value_messages.CustomTargetingValue], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py index 1154a1bebe68..4706f5043211 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) from .base import CustomTargetingValueServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def pre_get_custom_targeting_value( return request, 
metadata def post_get_custom_targeting_value( - self, response: custom_targeting_value_service.CustomTargetingValue - ) -> custom_targeting_value_service.CustomTargetingValue: + self, response: custom_targeting_value_messages.CustomTargetingValue + ) -> custom_targeting_value_messages.CustomTargetingValue: """Post-rpc interceptor for get_custom_targeting_value Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""Call the get custom targeting value method over HTTP. @@ -294,7 +297,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_value_service.CustomTargetingValue: + ~.custom_targeting_value_messages.CustomTargetingValue: The ``CustomTargetingValue`` resource. """ @@ -344,8 +347,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_value_service.CustomTargetingValue() - pb_resp = custom_targeting_value_service.CustomTargetingValue.pb(resp) + resp = custom_targeting_value_messages.CustomTargetingValue() + pb_resp = custom_targeting_value_messages.CustomTargetingValue.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_value(resp) @@ -454,7 +457,7 @@ def get_custom_targeting_value( self, ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], - custom_targeting_value_service.CustomTargetingValue, + custom_targeting_value_messages.CustomTargetingValue, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast @@ -502,11 +505,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py similarity index 85% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py index 65fa5abb358e..3b03f6d3f9dc 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import CreativeServiceClient +from .client import EntitySignalsMappingServiceClient -__all__ = ("CreativeServiceClient",) +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py similarity index 53% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py index c04fdc539730..fe94c89d85d2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py @@ -49,17 +49,20 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore -from google.ads.admanager_v1.services.creative_service import pagers -from google.ads.admanager_v1.types import ad_partner_declaration, creative_service +from google.ads.admanager_v1.services.entity_signals_mapping_service import pagers +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) -from .transports.base import DEFAULT_CLIENT_INFO, CreativeServiceTransport -from .transports.rest import CreativeServiceRestTransport +from .transports.base import DEFAULT_CLIENT_INFO, EntitySignalsMappingServiceTransport +from .transports.rest import EntitySignalsMappingServiceRestTransport -class CreativeServiceClientMeta(type): - """Metaclass for the CreativeService client. +class EntitySignalsMappingServiceClientMeta(type): + """Metaclass for the EntitySignalsMappingService client. This provides class-level methods for building and retrieving support objects (e.g. transport) without polluting the client instance @@ -68,13 +71,13 @@ class CreativeServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[CreativeServiceTransport]] - _transport_registry["rest"] = CreativeServiceRestTransport + ) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] + _transport_registry["rest"] = EntitySignalsMappingServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[CreativeServiceTransport]: + ) -> Type[EntitySignalsMappingServiceTransport]: """Returns an appropriate transport class. Args: @@ -93,8 +96,10 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class CreativeServiceClient(metaclass=CreativeServiceClientMeta): - """Provides methods for handling Creative objects.""" +class EntitySignalsMappingServiceClient( + metaclass=EntitySignalsMappingServiceClientMeta +): + """Provides methods for handling ``EntitySignalsMapping`` objects.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -146,7 +151,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - CreativeServiceClient: The constructed client. + EntitySignalsMappingServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -164,7 +169,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - CreativeServiceClient: The constructed client. + EntitySignalsMappingServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -173,69 +178,32 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> CreativeServiceTransport: + def transport(self) -> EntitySignalsMappingServiceTransport: """Returns the transport used by the client instance. Returns: - CreativeServiceTransport: The transport used by the client + EntitySignalsMappingServiceTransport: The transport used by the client instance. """ return self._transport @staticmethod - def ad_partner_path( - network_code: str, - ad_partner: str, - ) -> str: - """Returns a fully-qualified ad_partner string.""" - return "networks/{network_code}/adPartners/{ad_partner}".format( - network_code=network_code, - ad_partner=ad_partner, - ) - - @staticmethod - def parse_ad_partner_path(path: str) -> Dict[str, str]: - """Parses a ad_partner path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/adPartners/(?P.+?)$", path - ) - return m.groupdict() if m else {} - - @staticmethod - def company_path( - network_code: str, - company: str, - ) -> str: - """Returns a fully-qualified company string.""" - return "networks/{network_code}/companies/{company}".format( - network_code=network_code, - company=company, - ) - - @staticmethod - def parse_company_path(path: str) -> Dict[str, str]: - """Parses a company path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/companies/(?P.+?)$", path - ) - return m.groupdict() if m else {} - - @staticmethod - def creative_path( + def entity_signals_mapping_path( network_code: str, - creative: str, + entity_signals_mapping: str, ) -> str: - """Returns a fully-qualified creative string.""" - return "networks/{network_code}/creatives/{creative}".format( + """Returns a fully-qualified entity_signals_mapping string.""" + return "networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}".format( network_code=network_code, - creative=creative, + entity_signals_mapping=entity_signals_mapping, ) @staticmethod - def parse_creative_path(path: str) -> Dict[str, str]: - """Parses a creative path into its component segments.""" + def parse_entity_signals_mapping_path(path: str) -> Dict[str, str]: + """Parses a entity_signals_mapping path into its component segments.""" m = re.match( - r"^networks/(?P.+?)/creatives/(?P.+?)$", path + r"^networks/(?P.+?)/entitySignalsMappings/(?P.+?)$", + path, ) return m.groupdict() if m else {} @@ -473,15 +441,17 @@ def _get_api_endpoint( elif use_mtls_endpoint == "always" or ( use_mtls_endpoint == "auto" and client_cert_source ): - _default_universe = CreativeServiceClient._DEFAULT_UNIVERSE + _default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: raise MutualTLSChannelError( f"mTLS is not supported in any universe other than {_default_universe}." 
) - api_endpoint = CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + api_endpoint = EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain + api_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) ) return api_endpoint @@ -501,7 +471,7 @@ def _get_universe_domain( Raises: ValueError: If the universe domain is an empty string. """ - universe_domain = CreativeServiceClient._DEFAULT_UNIVERSE + universe_domain = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE if client_universe_domain is not None: universe_domain = client_universe_domain elif universe_domain_env is not None: @@ -527,7 +497,7 @@ def _compare_universes( ValueError: when client_universe does not match the universe in credentials. """ - default_universe = CreativeServiceClient._DEFAULT_UNIVERSE + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: @@ -551,7 +521,7 @@ def _validate_universe_domain(self): """ self._is_universe_domain_valid = ( self._is_universe_domain_valid - or CreativeServiceClient._compare_universes( + or EntitySignalsMappingServiceClient._compare_universes( self.universe_domain, self.transport._credentials ) ) @@ -581,13 +551,15 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ Union[ - str, CreativeServiceTransport, Callable[..., CreativeServiceTransport] + str, + EntitySignalsMappingServiceTransport, + Callable[..., EntitySignalsMappingServiceTransport], ] ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the creative service client. + """Instantiates the entity signals mapping service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -595,10 +567,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,CreativeServiceTransport,Callable[..., CreativeServiceTransport]]]): + transport (Optional[Union[str,EntitySignalsMappingServiceTransport,Callable[..., EntitySignalsMappingServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport. If a Callable is given, it will be called with the same set of initialization - arguments as used in the CreativeServiceTransport constructor. + arguments as used in the EntitySignalsMappingServiceTransport constructor. If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
@@ -651,11 +623,13 @@ def __init__( self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env, - ) = CreativeServiceClient._read_environment_variables() - self._client_cert_source = CreativeServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert + ) = EntitySignalsMappingServiceClient._read_environment_variables() + self._client_cert_source = ( + EntitySignalsMappingServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) ) - self._universe_domain = CreativeServiceClient._get_universe_domain( + self._universe_domain = EntitySignalsMappingServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) self._api_endpoint = None # updated below, depending on `transport` @@ -672,9 +646,9 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, CreativeServiceTransport) + transport_provided = isinstance(transport, EntitySignalsMappingServiceTransport) if transport_provided: - # transport is a CreativeServiceTransport instance. + # transport is a EntitySignalsMappingServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " @@ -685,12 +659,12 @@ def __init__( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = cast(CreativeServiceTransport, transport) + self._transport = cast(EntitySignalsMappingServiceTransport, transport) self._api_endpoint = self._transport.host self._api_endpoint = ( self._api_endpoint - or CreativeServiceClient._get_api_endpoint( + or EntitySignalsMappingServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, @@ -709,11 +683,14 @@ def __init__( ) transport_init: Union[ - Type[CreativeServiceTransport], Callable[..., CreativeServiceTransport] + Type[EntitySignalsMappingServiceTransport], + Callable[..., EntitySignalsMappingServiceTransport], ] = ( - CreativeServiceClient.get_transport_class(transport) + EntitySignalsMappingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None - else cast(Callable[..., CreativeServiceTransport], transport) + else cast( + Callable[..., EntitySignalsMappingServiceTransport], transport + ) ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -728,16 +705,18 @@ def __init__( api_audience=self._client_options.api_audience, ) - def get_creative( + def get_entity_signals_mapping( self, - request: Optional[Union[creative_service.GetCreativeRequest, dict]] = None, + request: Optional[ + Union[entity_signals_mapping_service.GetEntitySignalsMappingRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.Creative: - r"""API to retrieve a Creative object. + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to retrieve a ``EntitySignalsMapping`` object. .. 
code-block:: python @@ -750,28 +729,28 @@ def get_creative( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_get_creative(): + def sample_get_entity_signals_mapping(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = admanager_v1.EntitySignalsMappingServiceClient() # Initialize request argument(s) - request = admanager_v1.GetCreativeRequest( + request = admanager_v1.GetEntitySignalsMappingRequest( name="name_value", ) # Make the request - response = client.get_creative(request=request) + response = client.get_entity_signals_mapping(request=request) # Handle the response print(response) Args: - request (Union[google.ads.admanager_v1.types.GetCreativeRequest, dict]): - The request object. Request object for GetCreative - method. + request (Union[google.ads.admanager_v1.types.GetEntitySignalsMappingRequest, dict]): + The request object. Request object for ``GetEntitySignalsMapping`` method. name (str): - Required. The resource name of the Creative. Format: - ``networks/{network_code}/creatives/{creative_id}`` + Required. The resource name of the EntitySignalsMapping. + Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -783,8 +762,8 @@ def sample_get_creative(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.types.Creative: - The Creative resource. + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -798,8 +777,12 @@ def sample_get_creative(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, creative_service.GetCreativeRequest): - request = creative_service.GetCreativeRequest(request) + if not isinstance( + request, entity_signals_mapping_service.GetEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.GetEntitySignalsMappingRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -807,7 +790,9 @@ def sample_get_creative(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_creative] + rpc = self._transport._wrapped_methods[ + self._transport.get_entity_signals_mapping + ] # Certain fields should be provided within the metadata header; # add these here. @@ -829,16 +814,18 @@ def sample_get_creative(): # Done; return the response. return response - def list_creatives( + def list_entity_signals_mappings( self, - request: Optional[Union[creative_service.ListCreativesRequest, dict]] = None, + request: Optional[ + Union[entity_signals_mapping_service.ListEntitySignalsMappingsRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCreativesPager: - r"""API to retrieve a list of Creative objects. + ) -> pagers.ListEntitySignalsMappingsPager: + r"""API to retrieve a list of ``EntitySignalsMapping`` objects. .. 
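# Illustrative sketch (not from the generated patch): the flattened `name=`
# argument and a fully built request object are mutually exclusive -- the
# "Quick check" in the method body raises ValueError if both are supplied.
# Resource names below are placeholders.
from google.ads import admanager_v1

client = admanager_v1.EntitySignalsMappingServiceClient()

# Either pass the flattened field...
mapping = client.get_entity_signals_mapping(
    name="networks/123/entitySignalsMappings/456"
)

# ...or build the request object yourself -- but not both in the same call.
request = admanager_v1.GetEntitySignalsMappingRequest(
    name="networks/123/entitySignalsMappings/456"
)
mapping = client.get_entity_signals_mapping(request=request)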
code-block:: python @@ -851,29 +838,29 @@ def list_creatives( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_list_creatives(): + def sample_list_entity_signals_mappings(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = admanager_v1.EntitySignalsMappingServiceClient() # Initialize request argument(s) - request = admanager_v1.ListCreativesRequest( + request = admanager_v1.ListEntitySignalsMappingsRequest( parent="parent_value", ) # Make the request - page_result = client.list_creatives(request=request) + page_result = client.list_entity_signals_mappings(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.ads.admanager_v1.types.ListCreativesRequest, dict]): - The request object. Request object for ListCreatives - method. + request (Union[google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``ListEntitySignalsMappings`` method. parent (str): Required. The parent, which owns this collection of - Creatives. Format: networks/{network_code} + EntitySignalsMappings. Format: + ``networks/{network_code}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -885,14 +872,12 @@ def sample_list_creatives(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.services.creative_service.pagers.ListCreativesPager: - Response object for - ListCreativesRequest containing matching - Creative resources. + google.ads.admanager_v1.services.entity_signals_mapping_service.pagers.ListEntitySignalsMappingsPager: + Response object for ListEntitySignalsMappingsRequest containing matching + EntitySignalsMapping resources. - Iterating over this object will yield - results and resolve additional pages - automatically. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -907,8 +892,12 @@ def sample_list_creatives(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, creative_service.ListCreativesRequest): - request = creative_service.ListCreativesRequest(request) + if not isinstance( + request, entity_signals_mapping_service.ListEntitySignalsMappingsRequest + ): + request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -916,7 +905,9 @@ def sample_list_creatives(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_creatives] + rpc = self._transport._wrapped_methods[ + self._transport.list_entity_signals_mappings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -937,7 +928,7 @@ def sample_list_creatives(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListCreativesPager( + response = pagers.ListEntitySignalsMappingsPager( method=rpc, request=request, response=response, @@ -949,7 +940,547 @@ def sample_list_creatives(): # Done; return the response. 
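# Illustrative sketch (not from the generated patch): the pager returned by
# list_entity_signals_mappings yields EntitySignalsMapping messages one by one,
# issuing follow-up ListEntitySignalsMappings requests as needed, while its
# `pages` property exposes the raw per-page responses. The network code is a
# placeholder.
from google.ads import admanager_v1

client = admanager_v1.EntitySignalsMappingServiceClient()

# Item-level iteration; additional pages are fetched lazily.
for mapping in client.list_entity_signals_mappings(parent="networks/123"):
    print(mapping.name)

# Page-level iteration, e.g. to inspect each raw response.
for page in client.list_entity_signals_mappings(parent="networks/123").pages:
    print(len(page.entity_signals_mappings))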
return response - def __enter__(self) -> "CreativeServiceClient": + def create_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to create an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_create_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.CreateEntitySignalsMappingRequest( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.create_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + parent (str): + Required. The parent resource where this + EntitySignalsMapping will be created. Format: + ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The EntitySignalsMapping + object to create. + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity_signals_mapping]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to update an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_update_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.update_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The ``EntitySignalsMapping`` to update. + + The EntitySignalsMapping's name is used to identify the + EntitySignalsMapping to update. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}`` + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. 
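# Illustrative sketch (not from the generated patch): a partial update, where
# the FieldMask names exactly which fields of the EntitySignalsMapping the
# server should change. Resource name and field values are placeholders.
from google.ads import admanager_v1
from google.protobuf import field_mask_pb2

client = admanager_v1.EntitySignalsMappingServiceClient()

mapping = admanager_v1.EntitySignalsMapping()
mapping.name = "networks/123/entitySignalsMappings/456"
mapping.taxonomy_category_ids = [2268, 2269]

response = client.update_entity_signals_mapping(
    entity_signals_mapping=mapping,
    # Only the listed paths are written; other fields set on `mapping` are ignored.
    update_mask=field_mask_pb2.FieldMask(paths=["taxonomy_category_ids"]),
)
print(response)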
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entity_signals_mapping, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("entity_signals_mapping.name", request.entity_signals_mapping.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_create_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""API to batch create ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_create_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.CreateEntitySignalsMappingRequest() + requests.parent = "parent_value" + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchCreateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + parent (str): + Required. The parent resource where + ``EntitySignalsMappings`` will be created. Format: + ``networks/{network_code}`` The parent field in the + CreateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + create. A maximum of 100 objects can be created in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsResponse: + Response object for BatchCreateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
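# Illustrative sketch (not from the generated patch): batching several
# CreateEntitySignalsMappingRequest objects (up to 100 per call) through the
# flattened `requests` argument; each child request's parent must match the
# top-level parent. Identifiers below are placeholders.
from google.ads import admanager_v1

client = admanager_v1.EntitySignalsMappingServiceClient()
parent = "networks/123"

child_requests = []
for segment_id in (1980, 1981):
    child = admanager_v1.CreateEntitySignalsMappingRequest()
    child.parent = parent
    child.entity_signals_mapping.audience_segment_id = segment_id
    child.entity_signals_mapping.taxonomy_category_ids = [2268, 2269]
    child_requests.append(child)

response = client.batch_create_entity_signals_mappings(
    parent=parent, requests=child_requests
)
print(response)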
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""API to batch update ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_update_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.UpdateEntitySignalsMappingRequest() + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchUpdateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + parent (str): + Required. The parent resource where + ``EntitySignalsMappings`` will be updated. Format: + ``networks/{network_code}`` The parent field in the + UpdateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + update. A maximum of 100 objects can be updated in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsResponse: + Response object for BatchUpdateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_update_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "EntitySignalsMappingServiceClient": return self def __exit__(self, type, value, traceback): @@ -1025,4 +1556,4 @@ def get_operation( ) -__all__ = ("CreativeServiceClient",) +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py similarity index 69% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py index fda9c7f48045..464c0fe8d515 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py @@ -38,32 +38,37 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import creative_service +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) -class ListCreativesPager: - """A pager for iterating through ``list_creatives`` requests. +class ListEntitySignalsMappingsPager: + """A pager for iterating through ``list_entity_signals_mappings`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListCreativesResponse` object, and + :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` object, and provides an ``__iter__`` method to iterate through its - ``creatives`` field. + ``entity_signals_mappings`` field. If there are more pages, the ``__iter__`` method will make additional - ``ListCreatives`` requests and continue to iterate - through the ``creatives`` field on the + ``ListEntitySignalsMappings`` requests and continue to iterate + through the ``entity_signals_mappings`` field on the corresponding responses. 
- All the usual :class:`google.ads.admanager_v1.types.ListCreativesResponse` + All the usual :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., creative_service.ListCreativesResponse], - request: creative_service.ListCreativesRequest, - response: creative_service.ListCreativesResponse, + method: Callable[ + ..., entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ], + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +79,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListCreativesRequest): + request (google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest): The initial request object. - response (google.ads.admanager_v1.types.ListCreativesResponse): + response (google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +90,9 @@ def __init__( sent along with the request as metadata. """ self._method = method - self._request = creative_service.ListCreativesRequest(request) + self._request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +102,9 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[creative_service.ListCreativesResponse]: + def pages( + self, + ) -> Iterator[entity_signals_mapping_service.ListEntitySignalsMappingsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +116,11 @@ def pages(self) -> Iterator[creative_service.ListCreativesResponse]: ) yield self._response - def __iter__(self) -> Iterator[creative_service.Creative]: + def __iter__( + self, + ) -> Iterator[entity_signals_mapping_messages.EntitySignalsMapping]: for page in self.pages: - yield from page.creatives + yield from page.entity_signals_mappings def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py similarity index 60% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py index 0cdd254a8628..a842b7667625 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py @@ -16,15 +16,20 @@ from collections import OrderedDict from typing import Dict, Type -from .base import TeamServiceTransport -from .rest import TeamServiceRestInterceptor, 
TeamServiceRestTransport +from .base import EntitySignalsMappingServiceTransport +from .rest import ( + EntitySignalsMappingServiceRestInterceptor, + EntitySignalsMappingServiceRestTransport, +) # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[TeamServiceTransport]] -_transport_registry["rest"] = TeamServiceRestTransport +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] +_transport_registry["rest"] = EntitySignalsMappingServiceRestTransport __all__ = ( - "TeamServiceTransport", - "TeamServiceRestTransport", - "TeamServiceRestInterceptor", + "EntitySignalsMappingServiceTransport", + "EntitySignalsMappingServiceRestTransport", + "EntitySignalsMappingServiceRestInterceptor", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py similarity index 63% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/base.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py index 40679501a417..cc29ed2e8641 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py @@ -26,15 +26,18 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class AdPartnerServiceTransport(abc.ABC): - """Abstract transport class for AdPartnerService.""" +class EntitySignalsMappingServiceTransport(abc.ABC): + """Abstract transport class for EntitySignalsMappingService.""" AUTH_SCOPES = () @@ -129,13 +132,33 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
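The ``ListEntitySignalsMappingsPager`` renamed above is what callers actually iterate. A short consumption sketch, assuming ``list_entity_signals_mappings`` on the client returns this pager and that default credentials are configured; the network code is a placeholder.

from google.ads.admanager_v1.services.entity_signals_mapping_service import (
    EntitySignalsMappingServiceClient,
)
from google.ads.admanager_v1.types import entity_signals_mapping_service

client = EntitySignalsMappingServiceClient()
request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest(
    parent="networks/123456",
)

# __iter__ on the pager transparently fetches additional pages.
for mapping in client.list_entity_signals_mappings(request=request):
    print(mapping.name)

# Or walk page by page via the pager's `pages` property.
for page in client.list_entity_signals_mappings(request=request).pages:
    print(len(page.entity_signals_mappings))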
self._wrapped_methods = { - self.get_ad_partner: gapic_v1.method.wrap_method( - self.get_ad_partner, + self.get_entity_signals_mapping: gapic_v1.method.wrap_method( + self.get_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.list_entity_signals_mappings: gapic_v1.method.wrap_method( + self.list_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + self.create_entity_signals_mapping: gapic_v1.method.wrap_method( + self.create_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.update_entity_signals_mapping: gapic_v1.method.wrap_method( + self.update_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_create_entity_signals_mappings, default_timeout=None, client_info=client_info, ), - self.list_ad_partners: gapic_v1.method.wrap_method( - self.list_ad_partners, + self.batch_update_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_update_entity_signals_mappings, default_timeout=None, client_info=client_info, ), @@ -151,22 +174,77 @@ def close(self): raise NotImplementedError() @property - def get_ad_partner( + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def list_entity_signals_mappings( self, ) -> Callable[ - [ad_partner_service.GetAdPartnerRequest], - Union[ad_partner_service.AdPartner, Awaitable[ad_partner_service.AdPartner]], + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + Awaitable[entity_signals_mapping_service.ListEntitySignalsMappingsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + Awaitable[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse + ], + ], ]: raise NotImplementedError() @property - def list_ad_partners( + def batch_update_entity_signals_mappings( self, ) -> Callable[ - [ad_partner_service.ListAdPartnersRequest], + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], Union[ - ad_partner_service.ListAdPartnersResponse, - Awaitable[ad_partner_service.ListAdPartnersResponse], + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + Awaitable[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse + ], ], ]: raise NotImplementedError() @@ 
-185,4 +263,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ("AdPartnerServiceTransport",) +__all__ = ("EntitySignalsMappingServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py new file mode 100644 index 000000000000..a6e1199ef6ef --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py @@ -0,0 +1,1153 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EntitySignalsMappingServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EntitySignalsMappingServiceRestInterceptor: + """Interceptor for EntitySignalsMappingService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EntitySignalsMappingServiceRestTransport. + + .. 
code-block:: python + class MyCustomEntitySignalsMappingServiceInterceptor(EntitySignalsMappingServiceRestInterceptor): + def pre_batch_create_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_update_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_update_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EntitySignalsMappingServiceRestTransport(interceptor=MyCustomEntitySignalsMappingServiceInterceptor()) + client = EntitySignalsMappingServiceClient(transport=transport) + + + """ + + def pre_batch_create_entity_signals_mappings( + self, + request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_batch_create_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_batch_update_entity_signals_mappings( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. 
+ """ + return request, metadata + + def post_batch_update_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_create_entity_signals_mapping( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_create_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_get_entity_signals_mapping( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.GetEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_list_entity_signals_mappings( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_list_entity_signals_mappings( + self, response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + """Post-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_entity_signals_mapping( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_update_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EntitySignalsMappingServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EntitySignalsMappingServiceRestInterceptor + + +class EntitySignalsMappingServiceRestTransport(EntitySignalsMappingServiceTransport): + """REST backend transport for EntitySignalsMappingService. + + Provides methods for handling ``EntitySignalsMapping`` objects. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "admanager.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EntitySignalsMappingServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'admanager.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EntitySignalsMappingServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchCreateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchCreateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""Call the batch create entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + Response object for ``BatchCreateEntitySignalsMappings`` + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchCreate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_create_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_entity_signals_mappings(resp) + return resp + + class _BatchUpdateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchUpdateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""Call the batch update entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + Response object for ``BatchUpdateEntitySignalsMappings`` + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchUpdate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_update_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_update_entity_signals_mappings(resp) + return resp + + class _CreateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("CreateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the create entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.CreateEntitySignalsMappingRequest): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_create_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_entity_signals_mapping(resp) + return resp + + class _GetEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("GetEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the get entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.GetEntitySignalsMappingRequest): + The request object. Request object for ``GetEntitySignalsMapping`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/entitySignalsMappings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.GetEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_entity_signals_mapping(resp) + return resp + + class _ListEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("ListEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + r"""Call the list entity signals + mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.ListEntitySignalsMappingsRequest): + The request object. Request object for ``ListEntitySignalsMappings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + Response object for ``ListEntitySignalsMappingsRequest`` + containing matching ``EntitySignalsMapping`` resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + }, + ] + request, metadata = self._interceptor.pre_list_entity_signals_mappings( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + pb_resp = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entity_signals_mappings(resp) + return resp + + class _UpdateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("UpdateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the update entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.UpdateEntitySignalsMappingRequest): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{entity_signals_mapping.name=networks/*/entitySignalsMappings/*}", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_update_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_entity_signals_mapping(resp) + return resp + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_update_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchUpdateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EntitySignalsMappingServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EntitySignalsMappingServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py deleted file mode 100644 index 2944d1a2145f..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import LabelServiceClient - -__all__ = ("LabelServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py deleted file mode 100644 index ee9d7253b492..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/client.py +++ /dev/null @@ -1,977 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import ( - Callable, - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) -import warnings - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.services.label_service import pagers -from google.ads.admanager_v1.types import label_service - -from .transports.base import DEFAULT_CLIENT_INFO, LabelServiceTransport -from .transports.rest import LabelServiceRestTransport - - -class LabelServiceClientMeta(type): - """Metaclass for the LabelService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[LabelServiceTransport]] - _transport_registry["rest"] = LabelServiceRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[LabelServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class LabelServiceClient(metaclass=LabelServiceClientMeta): - """Provides methods for handling Label objects.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = "admanager.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LabelServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LabelServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> LabelServiceTransport: - """Returns the transport used by the client instance. - - Returns: - LabelServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def label_path( - network_code: str, - label: str, - ) -> str: - """Returns a fully-qualified label string.""" - return "networks/{network_code}/labels/{label}".format( - network_code=network_code, - label=label, - ) - - @staticmethod - def parse_label_path(path: str) -> Dict[str, str]: - """Parses a label path into its component segments.""" - m = re.match(r"^networks/(?P.+?)/labels/(?P
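Each REST stub added above follows the same transcode-then-dispatch flow: match the request against its per-method HTTP rule, expand the URI template, and carry the remaining fields as body or query parameters. A minimal illustration of just the transcoding step, reusing the ``GetOperation`` rule from the transport above; the operation name is a placeholder.

from google.api_core import path_template
from google.longrunning import operations_pb2
from google.protobuf import json_format

# Same rule as the _GetOperation stub above.
http_options = [
    {"method": "get", "uri": "/v1/{name=networks/*/operations/reports/runs/*}"},
]
request = operations_pb2.GetOperationRequest(
    name="networks/123456/operations/reports/runs/42",
)

# Convert the proto to a dict and expand the URI template.
request_kwargs = json_format.MessageToDict(request)
transcoded = path_template.transcode(http_options, **request_kwargs)

print(transcoded["method"])        # "get"
print(transcoded["uri"])           # "/v1/networks/123456/operations/reports/runs/42"
print(transcoded["query_params"])  # fields not captured by the URI template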